[ 565.403774] env[61594]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=61594) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.404147] env[61594]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=61594) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.404272] env[61594]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=61594) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 565.404537] env[61594]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 565.490532] env[61594]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=61594) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}}
[ 565.500807] env[61594]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=61594) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}}
[ 565.544495] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Creating reply queue: reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 565.552798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Expecting reply to msg 5916ad2617bb49beb1eb837382e6100e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 565.567417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5916ad2617bb49beb1eb837382e6100e
[ 565.638319] env[61594]: INFO nova.virt.driver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 565.710808] env[61594]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 565.711069] env[61594]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 565.713780] env[61594]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=61594) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 568.988217] env[61594]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-ea89f797-ea5c-4f9f-bd42-6eaa0b782d04 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.004550] env[61594]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=61594) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 569.004712] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-4bcc52ed-cd54-467a-83bd-be6b5d76f741 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.037920] env[61594]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 60b47.
[ 569.038073] env[61594]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.327s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.038601] env[61594]: INFO nova.virt.vmwareapi.driver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] VMware vCenter version: 7.0.3
[ 569.042406] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8894d38-c240-4717-87c2-f34f13cb022c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.059864] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87066a02-1bc9-4c09-9a73-bd7db44c5e39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.065681] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f1a7c9-a3df-4252-82df-83401e725fcc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.072183] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ee619b-a91b-41ff-8c41-897401b54f6a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.084882] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebcedb0-411c-4677-8b55-2f08f0867b49 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.090598] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58d520e-a56c-42e2-aee6-6d5dbef9537a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.120769] env[61594]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e65231e4-3864-4278-a8d5-e865cc79629c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 569.125453] env[61594]: DEBUG nova.virt.vmwareapi.driver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Extension org.openstack.compute already exists. {{(pid=61594) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 569.128077] env[61594]: INFO nova.compute.provider_config [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 569.128724] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Expecting reply to msg 04e1212483474319b040136908f1245e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 569.146358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04e1212483474319b040136908f1245e
[ 569.147368] env[61594]: DEBUG nova.context [None req-2e0dd90e-db62-43b3-9441-26fc19961d0d None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),bfcf983b-0431-4fe7-9afc-0efa54598587(cell1) {{(pid=61594) load_cells /opt/stack/nova/nova/context.py:464}}
[ 569.149621] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.149833] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.150576] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.150983] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Acquiring lock "bfcf983b-0431-4fe7-9afc-0efa54598587" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 569.151185] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Lock "bfcf983b-0431-4fe7-9afc-0efa54598587" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 569.152182] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Lock "bfcf983b-0431-4fe7-9afc-0efa54598587" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 569.172979] env[61594]: INFO dbcounter [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Registered counter for database nova_cell0
[ 569.181112] env[61594]: INFO dbcounter [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Registered counter for database nova_cell1
[ 569.184219] env[61594]: DEBUG oslo_db.sqlalchemy.engines [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61594) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 569.184574] env[61594]: DEBUG oslo_db.sqlalchemy.engines [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=61594) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 569.189113] env[61594]: DEBUG dbcounter [-] [61594] Writer thread running {{(pid=61594) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 569.189838] env[61594]: DEBUG dbcounter [-] [61594] Writer thread running {{(pid=61594) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 569.191982] env[61594]: ERROR nova.db.main.api [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 569.191982] env[61594]: result = function(*args, **kwargs)
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 569.191982] env[61594]: return func(*args, **kwargs)
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.191982] env[61594]: result = fn(*args, **kwargs)
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.191982] env[61594]: return f(*args, **kwargs)
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 569.191982] env[61594]: return db.service_get_minimum_version(context, binaries)
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.191982] env[61594]: _check_db_access()
[ 569.191982] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.191982] env[61594]: stacktrace = ''.join(traceback.format_stack())
[ 569.191982] env[61594]:
[ 569.193008] env[61594]: ERROR nova.db.main.api [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main
[ 569.193008] env[61594]: result = function(*args, **kwargs)
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 569.193008] env[61594]: return func(*args, **kwargs)
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 569.193008] env[61594]: result = fn(*args, **kwargs)
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 569.193008] env[61594]: return f(*args, **kwargs)
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 569.193008] env[61594]: return db.service_get_minimum_version(context, binaries)
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 569.193008] env[61594]: _check_db_access()
[ 569.193008] env[61594]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 569.193008] env[61594]: stacktrace = ''.join(traceback.format_stack())
[ 569.193008] env[61594]:
[ 569.193358] env[61594]: WARNING nova.objects.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 569.193525] env[61594]: WARNING nova.objects.service [None
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Failed to get minimum service version for cell bfcf983b-0431-4fe7-9afc-0efa54598587 [ 569.193947] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Acquiring lock "singleton_lock" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.194121] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Acquired lock "singleton_lock" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.194366] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Releasing lock "singleton_lock" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.194682] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Full set of CONF: {{(pid=61594) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 569.194825] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ******************************************************************************** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2600}} [ 569.194955] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] Configuration options gathered from: {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2601}} [ 569.195106] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 569.195302] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2603}} [ 569.195432] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ================================================================================ {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2605}} [ 569.195639] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] allow_resize_to_same_host = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.195809] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] arq_binding_timeout = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.195940] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] backdoor_port = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196081] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] backdoor_socket = None 
{{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196252] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] block_device_allocate_retries = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196414] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] block_device_allocate_retries_interval = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196586] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cert = self.pem {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196751] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.196917] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute_monitors = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.197164] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] config_dir = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.197358] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] config_drive_format = iso9660 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.197497] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.197664] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] config_source = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.197833] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] console_host = devstack {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198007] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] control_exchange = nova {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198178] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cpu_allocation_ratio = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198341] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] daemon = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198509] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] debug = True {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198670] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_access_ip_network_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198834] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_availability_zone = nova {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.198988] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_ephemeral_format = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.199190] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_green_pool_size = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.199431] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.199596] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] default_schedule_zone = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.199752] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] disk_allocation_ratio = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.199910] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] enable_new_services = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200117] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] enabled_apis = ['osapi_compute'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200296] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] enabled_ssl_apis = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200461] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] flat_injected = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200621] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] force_config_drive = False {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200782] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] force_raw_images = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.200953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] graceful_shutdown_timeout = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.201128] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] heal_instance_info_cache_interval = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.201349] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] host = cpu-1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.201551] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.201722] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] initial_disk_allocation_ratio = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.201888] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] initial_ram_allocation_ratio = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.202114] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.202287] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_build_timeout = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.202491] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_delete_interval = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.202679] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_format = [instance: %(uuid)s] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.202852] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_name_template = instance-%08x {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203029] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_usage_audit = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203209] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_usage_audit_period = month {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203382] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203549] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] instances_path = /opt/stack/data/nova/instances {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203719] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] internal_service_availability_zone = internal {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.203878] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] key = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204056] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] live_migration_retry_count = 30 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204230] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_config_append = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204400] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204561] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_dir = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204722] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.204850] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_options = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205030] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_rotate_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205208] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_rotate_interval_type = days {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205378] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] log_rotation_type = none {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205544] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s 
{{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205691] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.205868] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206046] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206182] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206348] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] long_rpc_timeout = 1800 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206509] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_concurrent_builds = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206668] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_concurrent_live_migrations = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206827] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_concurrent_snapshots = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.206985] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_local_block_devices = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207157] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_logfile_count = 30 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207319] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] max_logfile_size_mb = 200 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207475] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] maximum_instance_delete_attempts = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207646] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metadata_listen = 0.0.0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207812] 
env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metadata_listen_port = 8775 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.207979] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metadata_workers = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.208153] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] migrate_max_retries = -1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.208323] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] mkisofs_cmd = genisoimage {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.208534] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] my_block_storage_ip = 10.180.1.21 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.208663] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] my_ip = 10.180.1.21 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.208825] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] network_allocate_retries = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209014] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209213] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] osapi_compute_listen = 0.0.0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209381] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] osapi_compute_listen_port = 8774 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209552] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] osapi_compute_unique_server_name_scope = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209721] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] osapi_compute_workers = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.209882] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] password_length = 12 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210067] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] periodic_enable = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210249] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
periodic_fuzzy_delay = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210423] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] pointer_model = usbtablet {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210593] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] preallocate_images = none {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210754] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] publish_errors = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.210886] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] pybasedir = /opt/stack/nova {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211055] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ram_allocation_ratio = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211226] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rate_limit_burst = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211411] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rate_limit_except_level = CRITICAL {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211590] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rate_limit_interval = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211752] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reboot_timeout = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.211913] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reclaim_instance_interval = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212087] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] record = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212266] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reimage_timeout_per_gb = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212432] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] report_interval = 120 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212595] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rescue_timeout = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212755] env[61594]: DEBUG oslo_service.service 
[None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reserved_host_cpus = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.212915] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reserved_host_disk_mb = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213088] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reserved_host_memory_mb = 512 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213256] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] reserved_huge_pages = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213419] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] resize_confirm_window = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213581] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] resize_fs_using_block_device = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213742] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] resume_guests_state_on_host_boot = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.213913] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214084] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] rpc_response_timeout = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214258] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] run_external_periodic_tasks = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214432] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] running_deleted_instance_action = reap {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214598] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] running_deleted_instance_poll_interval = 1800 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214759] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] running_deleted_instance_timeout = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.214919] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler_instance_sync_interval = 120 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215098] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None 
None] service_down_time = 720 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215273] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] servicegroup_driver = db {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215437] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] shelved_offload_time = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215596] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] shelved_poll_interval = 3600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215762] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] shutdown_timeout = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.215923] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] source_is_ipv6 = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.216094] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ssl_only = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.216343] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.216513] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] sync_power_state_interval = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.216678] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] sync_power_state_pool_size = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.216850] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] syslog_log_facility = LOG_USER {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217017] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] tempdir = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217185] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] timeout_nbd = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217361] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] transport_url = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217562] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] update_resources_interval = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217745] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_cow_images = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.217912] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_eventlog = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218098] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_journal = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218270] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_json = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218431] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_rootwrap_daemon = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218592] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_stderr = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218750] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] use_syslog = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.218907] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vcpu_pin_set = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219099] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plugging_is_fatal = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219288] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plugging_timeout = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219457] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] virt_mkfs = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219618] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] volume_usage_poll_interval = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219778] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] watch_log_file = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.219944] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] web = /usr/share/spice-html5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 569.220166] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_concurrency.disable_process_locking = False {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.220473] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.220656] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.220824] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.220998] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.221190] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.221362] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.221572] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.auth_strategy = keystone {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.221745] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.compute_link_prefix = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.221922] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.dhcp_domain = novalocal {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222286] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.enable_instance_password = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222450] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.glance_link_prefix = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222615] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222786] env[61594]: 
DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.222948] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.instance_list_per_project_cells = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223124] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.list_records_by_skipping_down_cells = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223292] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.local_metadata_per_cell = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223460] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.max_limit = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.metadata_cache_expiration = 15 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223801] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.neutron_default_tenant_id = default {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.223967] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.use_neutron_default_nets = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224151] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224320] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224486] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224658] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224826] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_dynamic_targets = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.224992] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_jsonfile_path = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.225190] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.225385] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.backend = dogpile.cache.memcached {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.225553] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.backend_argument = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.225724] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.config_prefix = cache.oslo {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.225894] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.dead_timeout = 60.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226072] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.debug_cache_backend = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226247] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.enable_retry_client = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226407] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.enable_socket_keepalive = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226577] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.enabled = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226742] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.enforce_fips_mode = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.226905] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.expiration_time = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227080] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.hashclient_retry_attempts = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227255] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.hashclient_retry_delay = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227420] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_dead_retry = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227581] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_password = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227744] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.227906] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228078] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_pool_maxsize = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228246] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228408] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_sasl_enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228588] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228753] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_socket_timeout = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.228911] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.memcache_username = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.229108] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.proxies = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.229287] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_password = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.229462] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_sentinel_service_name = mymaster {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.229667] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.229857] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_server = localhost:6379 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230047] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_socket_timeout = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230235] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.redis_username = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230406] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.retry_attempts = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230575] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.retry_delay = 0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230740] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.socket_keepalive_count = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.230901] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.socket_keepalive_idle = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231076] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.socket_keepalive_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231242] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.tls_allowed_ciphers = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231400] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.tls_cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231556] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.tls_certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231719] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.tls_enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.231877] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cache.tls_keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232061] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232245] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.auth_type = password {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232410] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.cafile = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232586] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.catalog_info = volumev3::publicURL {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232748] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.232914] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233091] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.cross_az_attach = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233263] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.debug = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233425] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.endpoint_template = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233590] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.http_retries = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233756] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.233915] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234101] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.os_region_name = RegionOne {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234276] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234437] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cinder.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234608] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234770] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.cpu_dedicated_set = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.234930] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.cpu_shared_set = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235110] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.image_type_exclude_list = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235282] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235447] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.max_concurrent_disk_ops = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235611] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.max_disk_devices_to_attach = -1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235774] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.235946] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236124] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.resource_provider_association_refresh = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236291] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.shutdown_retry_interval = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236472] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236651] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] conductor.workers = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236827] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] console.allowed_origins = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.236988] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] console.ssl_ciphers = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.237177] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] console.ssl_minimum_version = default {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.237350] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] consoleauth.enforce_session_timeout = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.237520] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] consoleauth.token_ttl = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.237692] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.237853] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238028] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238197] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238360] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238519] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238683] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.238842] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239009] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239207] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239376] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239538] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239698] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.239870] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.service_type = accelerator {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240055] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240238] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240403] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240565] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240747] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.240910] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] cyborg.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241102] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.backend = sqlalchemy {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241280] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.connection = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241473] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.connection_debug = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241660] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.connection_parameters = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241829] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.connection_recycle_time = 3600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.241994] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.connection_trace = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242174] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
database.db_inc_retry_interval = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242341] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.db_max_retries = 20 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242504] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.db_max_retry_interval = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242664] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.db_retry_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242825] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.max_overflow = 50 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.242985] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.max_pool_size = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243159] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.max_retries = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243333] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243491] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.mysql_wsrep_sync_wait = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243650] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.pool_timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243813] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.retry_interval = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.243970] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.slave_connection = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.sqlite_synchronous = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] database.use_db_reconnect = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.backend = sqlalchemy {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.connection = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.connection_debug = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245331] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.connection_parameters = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245595] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.connection_recycle_time = 3600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245595] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.connection_trace = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245595] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.db_inc_retry_interval = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245595] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.db_max_retries = 20 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.245731] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.db_max_retry_interval = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.db_retry_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.max_overflow = 50 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.max_pool_size = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.max_retries = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.248445] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.pool_timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.retry_interval = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.slave_connection = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] api_database.sqlite_synchronous = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] devices.enabled_mdev_types = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.249953] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ephemeral_storage_encryption.default_format = luks {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ephemeral_storage_encryption.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.api_servers = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.connect_retries = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.debug = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.default_trusted_certificate_ids = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.251811] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.enable_certificate_validation = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.enable_rbd_download = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.num_retries = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.rbd_ceph_conf = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.rbd_connect_timeout = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.rbd_pool = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.rbd_user = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252367] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.service_type = image {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252626] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252896] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.252947] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.253097] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.verify_glance_signatures = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.253263] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] glance.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.253451] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] guestfs.debug = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.253640] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] mks.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254014] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] mks.mksproxy_base_url = 
http://127.0.0.1:6090/ {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254211] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.manager_interval = 2400 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254388] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.precache_concurrency = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254561] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.remove_unused_base_images = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254730] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.254899] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255090] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] image_cache.subdirectory_name = _base {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255272] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.api_max_retries = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255438] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.api_retry_interval = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255601] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255764] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.auth_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.255925] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.256122] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.256352] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.256531] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.conductor_group = None {{(pid=61594) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.256694] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.256855] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257024] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257199] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257364] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257526] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257685] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.257851] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.peer_list = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258018] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258183] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258352] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.serial_console_state_timeout = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258512] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258684] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.service_type = baremetal {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.258848] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.shard = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259014] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259219] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259388] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259548] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259728] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.259891] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ironic.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260088] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260272] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] key_manager.fixed_key = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260456] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260620] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.barbican_api_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260780] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.barbican_endpoint = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.260952] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.barbican_endpoint_type = public {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261128] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.barbican_region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261293] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261479] env[61594]: DEBUG 
oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261658] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261822] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.261981] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262167] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.number_of_retries = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262335] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.retry_delay = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262498] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.send_service_user_token = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262661] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262819] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.262981] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.verify_ssl = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263156] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican.verify_ssl_path = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263324] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263487] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.auth_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263647] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263803] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
barbican_service_user.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.263964] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264141] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264303] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264465] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264622] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] barbican_service_user.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264786] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.approle_role_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.264945] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.approle_secret_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265118] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265284] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265458] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265649] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265815] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.265988] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.kv_mountpoint = secret {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.266165] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.kv_path = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.266333] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.kv_version = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.266493] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.namespace = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.266652] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.root_token_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.266815] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267008] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.ssl_ca_crt_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267224] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267400] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.use_ssl = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267576] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267750] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.267916] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.auth_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268092] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268260] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268428] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268588] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268747] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.268909] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269108] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269284] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269448] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269609] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269770] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.269932] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270107] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270284] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.service_type = identity {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270448] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270606] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270764] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.270920] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.271114] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.valid_interfaces = ['internal', 
'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.271281] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] keystone.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.271504] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.connection_uri = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.271676] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_mode = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.271846] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_model_extra_flags = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272029] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_models = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272209] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_power_governor_high = performance {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272380] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_power_governor_low = powersave {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272545] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_power_management = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272716] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.272880] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.device_detach_attempts = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273053] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.device_detach_timeout = 20 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273228] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.disk_cachemodes = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273389] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.disk_prefix = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273556] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.enabled_perf_events = [] {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273724] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.file_backed_memory = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.273889] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.gid_maps = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274062] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.hw_disk_discard = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274233] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.hw_machine_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274398] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_rbd_ceph_conf = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274563] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274725] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.274894] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_rbd_glance_store_name = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275075] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_rbd_pool = rbd {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275257] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_type = default {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275420] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.images_volume_group = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275582] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.inject_key = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275746] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.inject_partition = -2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.275909] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.inject_password = False {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276085] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.iscsi_iface = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276255] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.iser_use_multipath = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276416] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_bandwidth = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276577] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276737] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_downtime = 500 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.276896] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277096] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277266] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_inbound_addr = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277436] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277655] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_permit_post_copy = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277799] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_scheme = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.277973] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_timeout_action = abort {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.278154] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_tunnelled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.278319] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
libvirt.live_migration_uri = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.278495] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.live_migration_with_native_tls = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.278655] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.max_queues = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.278817] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.279079] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.279262] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.nfs_mount_options = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.279551] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.279725] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.279893] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_iser_scan_tries = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280066] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_memory_encrypted_guests = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280237] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280399] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_pcie_ports = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280566] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.num_volume_scan_tries = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280730] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.pmem_namespaces = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.280890] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
libvirt.quobyte_client_cfg = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.281181] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.281364] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rbd_connect_timeout = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.281558] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.281728] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.281891] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rbd_secret_uuid = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282066] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rbd_user = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282239] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282414] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.remote_filesystem_transport = ssh {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282575] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rescue_image_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282733] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rescue_kernel_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.282892] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rescue_ramdisk_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.283072] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.283240] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.rx_queue_size = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.283409] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
libvirt.smbfs_mount_options = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.283678] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.283850] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.snapshot_compression = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284023] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.snapshot_image_format = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284242] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284414] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.sparse_logical_volumes = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284578] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.swtpm_enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284750] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.swtpm_group = tss {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.284917] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.swtpm_user = tss {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285107] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.sysinfo_serial = unique {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285271] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.tb_cache_size = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285430] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.tx_queue_size = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285594] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.uid_maps = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285758] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.use_virtio_for_bridges = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.285928] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.virt_type = kvm 
{{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286109] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.volume_clear = zero {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286279] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.volume_clear_size = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286444] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.volume_use_multipath = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286603] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_cache_path = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286770] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.286943] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_mount_group = qemu {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.287164] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_mount_opts = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.287349] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.287623] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.287803] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.vzstorage_mount_user = stack {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.287997] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288168] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288345] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.auth_type = password {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288508] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.cafile = 
None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288669] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288833] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.288993] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.289198] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.289387] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.default_floating_pool = public {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.289563] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.289745] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.extension_sync_interval = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.289912] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.http_retries = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290108] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290286] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290447] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290620] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290780] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.290950] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.ovs_bridge = br-int {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291132] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.physnets = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291305] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.region_name = RegionOne {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291464] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291635] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.service_metadata_proxy = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291793] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.291961] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.service_type = network {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292161] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292337] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292498] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292655] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292833] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.292993] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] neutron.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.293179] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] notifications.bdms_in_notifications = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.293357] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] notifications.default_level = INFO {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.293530] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] notifications.notification_format = unversioned {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.293690] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] notifications.notify_on_state_change = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.293861] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294043] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] pci.alias = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294221] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] pci.device_spec = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294386] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] pci.report_in_placement = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294555] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294727] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.auth_type = password {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.294894] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295063] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295289] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295480] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295647] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295806] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.connect_retry_delay = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.295965] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.default_domain_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296138] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.default_domain_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296302] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.domain_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296461] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.domain_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296619] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296779] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.296936] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297107] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297271] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297439] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.password = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297598] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.project_domain_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297765] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.project_domain_name = Default {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.297931] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.project_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.298116] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.project_name = service {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.298352] 
env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.region_name = RegionOne {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.298539] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.298701] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.298878] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.service_type = placement {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299107] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299291] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299462] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299625] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.system_scope = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299787] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.299949] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.trust_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300126] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.user_domain_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300300] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.user_domain_name = Default {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300458] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.user_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300632] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.username = placement {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300811] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.300973] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] placement.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.301167] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.cores = 20 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.301334] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.count_usage_from_placement = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.301536] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.301720] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.injected_file_content_bytes = 10240 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.301887] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.injected_file_path_length = 255 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302067] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.injected_files = 5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302240] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.instances = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302404] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.key_pairs = 100 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302571] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.metadata_items = 128 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302737] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.ram = 51200 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.302902] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.recheck_quota = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303082] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.server_group_members = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303252] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] quota.server_groups = 10 {{(pid=61594) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303425] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303591] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303754] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.image_metadata_prefilter = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.303915] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304089] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.max_attempts = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304259] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.max_placement_results = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304425] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304587] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.query_placement_for_image_type_support = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304750] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.304923] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] scheduler.workers = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305110] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305288] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305465] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305633] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305796] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.305958] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306137] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306329] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306496] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.host_subset_size = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306659] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306817] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.306997] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.307193] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.isolated_hosts = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.307368] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.isolated_images = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.307533] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.307696] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.307864] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308040] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.pci_in_placement = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308212] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308381] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308546] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308707] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.308867] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309044] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309242] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.track_instance_changes = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309428] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309607] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metrics.required = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309770] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metrics.weight_multiplier = 1.0 
{{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.309933] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.310112] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] metrics.weight_setting = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.310430] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.310609] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.310791] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.port_range = 10000:20000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.310962] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.311171] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.311353] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] serial_console.serialproxy_port = 6083 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.311555] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.311738] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.auth_type = password {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.311902] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312076] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312247] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312407] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.insecure = False {{(pid=61594) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312566] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312737] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.send_service_user_token = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.312901] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.313074] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] service_user.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.313250] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.agent_enabled = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.313427] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.313738] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.313930] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314117] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.html5proxy_port = 6082 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314289] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.image_compression = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314448] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.jpeg_compression = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314607] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.playback_compression = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314776] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.server_listen = 127.0.0.1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.314947] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315149] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.streaming_mode = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315330] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] spice.zlib_compression = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315497] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] upgrade_levels.baseapi = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315667] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] upgrade_levels.compute = auto {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315827] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] upgrade_levels.conductor = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.315984] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] upgrade_levels.scheduler = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316166] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316333] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.auth_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316492] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316652] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316812] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.316972] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.317141] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.keyfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.317309] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.317551] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vendordata_dynamic_auth.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.317749] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.api_retry_count = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.317915] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.ca_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318101] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.cache_prefix = devstack-image-cache {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318278] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.cluster_name = testcl1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318444] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.connection_pool_size = 10 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318608] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.console_delay_seconds = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318778] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.datastore_regex = ^datastore.* {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.318982] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.319202] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.host_password = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.319382] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.host_port = 443 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.319555] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.host_username = administrator@vsphere.local {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.319726] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.insecure = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.319889] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.integration_bridge = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320072] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.maximum_objects = 100 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320239] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.pbm_default_policy = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320423] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.pbm_enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320587] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.pbm_wsdl_location = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320755] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.320915] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.serial_port_proxy_uri = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.321086] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.serial_port_service_uri = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.321258] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.task_poll_interval = 0.5 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.321476] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.use_linked_clone = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.321686] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.vnc_keymap = en-us {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.321860] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.vnc_port = 5900 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.322040] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vmware.vnc_port_total = 10000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.322234] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.auth_schemes = ['none'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.322415] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.322713] env[61594]: 
DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.322900] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323090] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.novncproxy_port = 6080 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323306] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.server_listen = 127.0.0.1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323488] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323651] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.vencrypt_ca_certs = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323812] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.vencrypt_client_cert = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.323970] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vnc.vencrypt_client_key = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.324172] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.324345] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_deep_image_inspection = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.324510] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.324677] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.324839] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325008] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.disable_rootwrap = False {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325181] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.enable_numa_live_migration = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325345] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325508] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325666] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325826] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.libvirt_disable_apic = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.325986] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326164] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326329] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326490] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326650] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326808] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.326986] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.327187] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 
569.327375] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.327551] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.327773] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.327973] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.client_socket_timeout = 900 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328165] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.default_pool_size = 1000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328337] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.keep_alive = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328508] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.max_header_line = 16384 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328670] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.secure_proxy_ssl_header = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328831] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.ssl_ca_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.328991] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.ssl_cert_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.329199] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.ssl_key_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.329374] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.tcp_keepidle = 600 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.329549] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.329717] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] zvm.ca_file = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.329875] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] zvm.cloud_connector_url = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.330219] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.330410] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] zvm.reachable_timeout = 300 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.330598] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.enforce_new_defaults = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.330773] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.enforce_scope = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.330950] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.policy_default_rule = default {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.331150] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.331333] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.policy_file = policy.yaml {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.331535] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.331703] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.331863] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332032] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332201] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332372] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332548] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332724] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.connection_string = messaging:// {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.332890] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.enabled = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333072] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.es_doc_type = notification {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333244] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.es_scroll_size = 10000 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333412] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.es_scroll_time = 2m {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333575] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.filter_error_trace = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333741] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.hmac_keys = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.333906] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.sentinel_service_name = mymaster {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334080] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.socket_timeout = 0.1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334247] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.trace_requests = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334406] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler.trace_sqlalchemy = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334577] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler_jaeger.process_tags = {} {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334735] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] 
profiler_jaeger.service_name_prefix = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.334898] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] profiler_otlp.service_name_prefix = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335071] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] remote_debug.host = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335269] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] remote_debug.port = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335461] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335628] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335792] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.335952] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336127] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336291] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336452] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336614] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336773] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.336942] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337114] env[61594]: 
DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337290] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337456] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337623] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337791] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.337957] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338134] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338311] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338472] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338634] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338799] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.338965] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.339174] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.339358] env[61594]: DEBUG oslo_service.service [None 
req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.339534] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.339710] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.339875] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340055] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340233] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340399] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340573] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340742] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.340903] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341083] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341259] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.ssl_version = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341424] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341612] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341779] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_notifications.retry = -1 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.341963] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342152] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_messaging_notifications.transport_url = **** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342326] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.auth_section = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342491] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.auth_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342649] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.cafile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342804] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.certfile = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.342965] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.collect_timing = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343135] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.connect_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343294] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.connect_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343452] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.endpoint_id = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343606] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.endpoint_override = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343765] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.insecure = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.343921] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.keyfile = None {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344087] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.max_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344249] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.min_version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344405] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.region_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344562] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.retriable_status_codes = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344716] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.service_name = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.344873] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.service_type = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345043] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.split_loggers = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345208] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.status_code_retries = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345365] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.status_code_retry_delay = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345520] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.timeout = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345679] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.valid_interfaces = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345830] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_limit.version = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.345991] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_reports.file_event_handler = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346167] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=61594) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346329] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] oslo_reports.log_dir = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346497] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346654] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346810] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.346998] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.347202] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.347368] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.347548] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.347720] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.group = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.347880] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348057] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348230] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348387] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] vif_plug_ovs_privileged.user = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348558] env[61594]: DEBUG oslo_service.service 
[None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.flat_interface = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348741] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.348910] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.349130] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.349317] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.349486] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.349652] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.349867] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350079] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350261] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.isolate_vif = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350434] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350599] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350769] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.350938] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.ovsdb_interface = native {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} 
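The long DEBUG run above and below is oslo.config's standard startup dump: at service start every registered option group is walked and each `group.option = value` pair is emitted through `log_opt_values()`. The following is a minimal sketch, not Nova's code, of how such a dump is produced; the group and option names mirror the `os_vif_ovs` entries in the log, while the project name is a placeholder.

```python
# Minimal sketch of the oslo.config option dump seen in the log.
# Group/option names copied from the log; 'example' project name is made up.
import logging

from oslo_config import cfg

CONF = cfg.CONF

ovs_group = cfg.OptGroup('os_vif_ovs')
ovs_opts = [
    cfg.IntOpt('network_device_mtu', default=1500),
    cfg.IntOpt('ovs_vsctl_timeout', default=120),
    cfg.StrOpt('ovsdb_connection', default='tcp:127.0.0.1:6640'),
    cfg.StrOpt('ovsdb_interface', default='native'),
    cfg.BoolOpt('per_port_bridge', default=False),
]

CONF.register_group(ovs_group)
CONF.register_opts(ovs_opts, group=ovs_group)

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

CONF([], project='example')               # parse an (empty) command line
CONF.log_opt_values(LOG, logging.DEBUG)   # one "group.option = value" DEBUG line per option
```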
[ 569.351117] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_vif_ovs.per_port_bridge = False {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.351287] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] os_brick.lock_path = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.351458] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.capabilities = [21] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.351615] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.group = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.351772] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.helper_command = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.351935] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352111] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352272] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] privsep_osbrick.user = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352442] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352599] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.group = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352755] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.helper_command = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.352922] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.353097] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.353259] env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] nova_sys_admin.user = None {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2620}} [ 569.353389] 
env[61594]: DEBUG oslo_service.service [None req-ce9406e1-9ba9-4a5a-8a48-c15df0b0b440 None None] ******************************************************************************** {{(pid=61594) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2624}} [ 569.353805] env[61594]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 569.354666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg dc3124ed7d154ece9400cb5099898648 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.362379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc3124ed7d154ece9400cb5099898648 [ 569.363457] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Getting list of instances from cluster (obj){ [ 569.363457] env[61594]: value = "domain-c8" [ 569.363457] env[61594]: _type = "ClusterComputeResource" [ 569.363457] env[61594]: } {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 569.364655] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02d5779-f032-48ac-a18c-4105ec2277dd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.373620] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Got total of 0 instances {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 569.374129] env[61594]: WARNING nova.virt.vmwareapi.driver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 569.374548] env[61594]: INFO nova.virt.node [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Generated node identity f0ff3a26-85e8-47dd-b241-86a582e8d4be [ 569.374766] env[61594]: INFO nova.virt.node [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Wrote node identity f0ff3a26-85e8-47dd-b241-86a582e8d4be to /opt/stack/data/n-cpu-1/compute_id [ 569.375157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 0cb282aafe2b40aabb04597328cf3920 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.388017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cb282aafe2b40aabb04597328cf3920 [ 569.388745] env[61594]: WARNING nova.compute.manager [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Compute nodes ['f0ff3a26-85e8-47dd-b241-86a582e8d4be'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. 
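The "Generated node identity ... / Wrote node identity ... to .../compute_id" lines show the compute node persisting a stable UUID on first start so later restarts reuse it. Below is an illustrative sketch of that read-or-create pattern, assuming the state-file path from the log; it is not nova.virt.node's actual implementation.

```python
# Illustrative sketch only: keep a stable node identity as a UUID in a small
# state file, generated on first start and re-read afterwards.
import uuid
from pathlib import Path

def get_or_create_node_identity(state_file='/opt/stack/data/n-cpu-1/compute_id'):
    path = Path(state_file)
    if path.exists():
        return path.read_text().strip()        # reuse the identity written earlier
    identity = str(uuid.uuid4())               # first start: generate a new UUID
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(identity + '\n')
    return identity
```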
[ 569.389217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 5626111ec29043beb2875e247243b70b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.411602] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5626111ec29043beb2875e247243b70b [ 569.412324] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg cfcb1c9be5ef4c08939a1cce41ba8a70 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.422530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfcb1c9be5ef4c08939a1cce41ba8a70 [ 569.423017] env[61594]: INFO nova.compute.manager [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 569.423573] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg b5188fe8d93b4f2384f7d5cc52892b3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.432650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5188fe8d93b4f2384f7d5cc52892b3a [ 569.433466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 5870814a9f6743cf88f56eeb4c93d28e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.443608] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5870814a9f6743cf88f56eeb4c93d28e [ 569.444150] env[61594]: WARNING nova.compute.manager [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
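The `Acquiring lock "compute_resources" ... / acquired ... waited Xs / released ... held Ys` lines that follow come from oslo.concurrency's `synchronized` wrapper, which serializes the resource-tracker methods that touch shared accounting state and logs the acquire/release timing at DEBUG level. A small sketch of that pattern, with illustrative method names rather than Nova's real ones:

```python
# Sketch of the lock pattern behind the "compute_resources" lock lines.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

class ResourceTrackerSketch:
    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def clean_compute_node_cache(self):
        # body runs with the in-process lock held; the decorator's inner
        # wrapper emits the "Acquiring/acquired/released" DEBUG lines
        pass

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_available_resource(self):
        pass

ResourceTrackerSketch().clean_compute_node_cache()
```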
[ 569.444405] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.444645] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.444821] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.444991] env[61594]: DEBUG nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 569.446087] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ee3eb09-d60f-478a-a969-bb3b574f54ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.454872] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b2e2fd-44c5-4715-a938-146f2bdb0a96 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.468672] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6635a2a6-3a4f-49cb-9526-3b77e5de90e3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.475019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7db3954-ac1e-4a48-b105-fd2b24e95b9f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.504806] env[61594]: DEBUG nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181521MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 569.504945] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.505150] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.505483] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 1d434f7ef19044688f264b8ac8de548f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.516319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1d434f7ef19044688f264b8ac8de548f [ 569.516785] env[61594]: WARNING nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] No compute node record for cpu-1:f0ff3a26-85e8-47dd-b241-86a582e8d4be: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host f0ff3a26-85e8-47dd-b241-86a582e8d4be could not be found. [ 569.517760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 1a655c457713485aa70f2f2bc642f34d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.529263] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a655c457713485aa70f2f2bc642f34d [ 569.529850] env[61594]: INFO nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: f0ff3a26-85e8-47dd-b241-86a582e8d4be [ 569.530221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 6720fc0833c542158930d7af905b4bf7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.538768] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6720fc0833c542158930d7af905b4bf7 [ 569.539601] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 5a1b8c98c0f24d0388760d22a003df3e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.557300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a1b8c98c0f24d0388760d22a003df3e [ 569.557794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 36ea8c1885c14ebdb501969bb1992dbc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.580304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36ea8c1885c14ebdb501969bb1992dbc [ 569.580963] env[61594]: DEBUG nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 569.581145] env[61594]: DEBUG nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 569.684360] env[61594]: INFO nova.scheduler.client.report [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] [req-c7f2dd54-9f17-47af-b0e7-146b5ed92838] Created resource provider record via placement API for resource provider with UUID f0ff3a26-85e8-47dd-b241-86a582e8d4be and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
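With the resource provider created, the hypervisor view above (196590 MB RAM, 512 MB reserved, 48 vCPUs, 200 GB disk) is pushed to Placement as the inventory shown in the lines that follow. A quick arithmetic sketch of how Placement turns such an inventory into schedulable capacity, using those logged values: capacity per resource class is `(total - reserved) * allocation_ratio`, and no single allocation may exceed `max_unit`.

```python
# Capacity arithmetic for the inventory reported below in the log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 139,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, per-allocation limit {inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```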
[ 569.701493] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-200cd751-2e42-4bee-9f8a-3725d1cb1efc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.708834] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266276ac-1770-4194-8af3-51f9406da46b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.738237] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac579dca-5707-4e91-bfe9-2818b47ecc78 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.745118] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d8855c0-551a-44ec-9ed6-a00703fa7e23 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.758348] env[61594]: DEBUG nova.compute.provider_tree [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Updating inventory in ProviderTree for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 569.758877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg 7095eb94db324daaa9e1a35df78de534 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.769417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7095eb94db324daaa9e1a35df78de534 [ 569.800579] env[61594]: DEBUG nova.scheduler.client.report [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Updated inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 569.800809] env[61594]: DEBUG nova.compute.provider_tree [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Updating resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be generation from 0 to 1 during operation: update_inventory {{(pid=61594) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 569.800948] env[61594]: DEBUG nova.compute.provider_tree [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Updating inventory in ProviderTree for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 
1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 569.848331] env[61594]: DEBUG nova.compute.provider_tree [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Updating resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be generation from 1 to 2 during operation: update_traits {{(pid=61594) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 569.850543] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Expecting reply to msg ff3407c32e7144c49ac91dabe7bafd54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 569.865328] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3407c32e7144c49ac91dabe7bafd54 [ 569.865973] env[61594]: DEBUG nova.compute.resource_tracker [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 569.866173] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.361s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 569.866337] env[61594]: DEBUG nova.service [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Creating RPC server for service compute {{(pid=61594) start /opt/stack/nova/nova/service.py:182}} [ 569.876550] env[61594]: INFO oslo.messaging._drivers.impl_rabbit [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Creating fanout queue: compute_fanout_7370be98f2384ea1bcbcf391e0a4ba0f [ 569.879526] env[61594]: DEBUG nova.service [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] Join ServiceGroup membership for this service compute {{(pid=61594) start /opt/stack/nova/nova/service.py:199}} [ 569.879691] env[61594]: DEBUG nova.servicegroup.drivers.db [None req-3fd8a48e-be09-4cfd-bbea-203b9bc8c240 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=61594) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 574.882620] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7740fec7ef294f9aa6f419aa86d88363 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 574.896357] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7740fec7ef294f9aa6f419aa86d88363 [ 579.191560] env[61594]: DEBUG dbcounter [-] [61594] Writing DB stats nova_cell0:SELECT=1 {{(pid=61594) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 579.192230] env[61594]: DEBUG dbcounter [-] [61594] Writing DB stats nova_cell1:SELECT=1 {{(pid=61594) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 611.882918] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.883682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting 
reply to msg 22759e835c4a40b18fe872d87a92a52f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 611.900842] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22759e835c4a40b18fe872d87a92a52f [ 611.901757] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Getting list of instances from cluster (obj){ [ 611.901757] env[61594]: value = "domain-c8" [ 611.901757] env[61594]: _type = "ClusterComputeResource" [ 611.901757] env[61594]: } {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 611.902790] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1a7432-0041-46ca-93c9-b3e919276de4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.919703] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Got total of 0 instances {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 611.919968] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.920309] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Getting list of instances from cluster (obj){ [ 611.920309] env[61594]: value = "domain-c8" [ 611.920309] env[61594]: _type = "ClusterComputeResource" [ 611.920309] env[61594]: } {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 611.921273] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3a4ae0-e52a-4592-b584-011facbc6ce4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.933346] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Got total of 0 instances {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 613.429125] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "ef6e717f-c8b2-4536-8425-89f218d49f1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.429556] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "ef6e717f-c8b2-4536-8425-89f218d49f1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.430021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 09f0c6ac8787478d974f6cef6e2a25cb in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.448160] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09f0c6ac8787478d974f6cef6e2a25cb [ 613.448451] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 613.450946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 05378075d8fb4e3d865d56b59a70b3c4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.565630] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05378075d8fb4e3d865d56b59a70b3c4 [ 613.601555] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.601713] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.603440] env[61594]: INFO nova.compute.claims [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.605182] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg fe33e14203cd442abb4236ef67da0079 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.692094] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe33e14203cd442abb4236ef67da0079 [ 613.693940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 50348e03e3af40acbefa052db3449d56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.718364] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50348e03e3af40acbefa052db3449d56 [ 613.786195] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "04fd7039-c2c8-4b78-8c3d-37eb66fe2115" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
613.786668] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "04fd7039-c2c8-4b78-8c3d-37eb66fe2115" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.790031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 053cc2b6da5242f7ab132826b701c09e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.790031] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f111ba3b-0a92-4a8b-beda-7c6a38829023 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.798886] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec750cb-1818-43ca-b85d-c4142656e855 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.807176] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 053cc2b6da5242f7ab132826b701c09e [ 613.807845] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 613.810722] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 4fb0619a160e4a9fbe735b193b91562a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.839934] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96505a0-c6c1-4b1a-be9b-887d98fa7e3c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.848742] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01423997-1a4b-4049-a235-70f68a5ada94 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.870484] env[61594]: DEBUG nova.compute.provider_tree [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.871087] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg f25ad60d29bb41cf9f8a3992642d13c7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.879673] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fb0619a160e4a9fbe735b193b91562a [ 613.886212] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f25ad60d29bb41cf9f8a3992642d13c7 [ 613.888218] env[61594]: DEBUG nova.scheduler.client.report [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 613.890654] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg fee1ce62c7064395b65b5434aae2ed22 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.905857] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 613.909877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fee1ce62c7064395b65b5434aae2ed22 [ 613.910852] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.309s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 613.911465] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 613.913229] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 5ce5e75281e64b5fba730b48ce1e3606 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.914401] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.009s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 613.915856] env[61594]: INFO nova.compute.claims [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 613.921019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 8396acc6c5dc41388f1546637bba66e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.982824] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ce5e75281e64b5fba730b48ce1e3606 [ 613.984567] env[61594]: DEBUG nova.compute.utils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 613.985298] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 2060a7def10142d08f29fde6e961ce3d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 613.987120] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 613.987120] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 613.993925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8396acc6c5dc41388f1546637bba66e0 [ 613.995399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 414dc2403ec049ed9b40a46e734439e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.004685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 414dc2403ec049ed9b40a46e734439e3 [ 614.005221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2060a7def10142d08f29fde6e961ce3d [ 614.010373] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 614.012823] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg dfd8d6e3551e4da28b7e707d11eaedd4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.061118] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691bd724-b248-426f-8afd-bb0e697b4725 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.069409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfd8d6e3551e4da28b7e707d11eaedd4 [ 614.072405] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 0117d6e8836248d6a21c4610b0289689 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.081505] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1483c2a6-df8c-4e04-b603-b4c1df28e989 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.123210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0117d6e8836248d6a21c4610b0289689 [ 614.124390] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 614.128796] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68922bdd-53c3-42d7-b25b-d5f2c8ff6b0d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.140091] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681707fc-6df2-40cb-bbec-24b27100fc2f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.156732] env[61594]: DEBUG nova.compute.provider_tree [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.156732] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg bbf4d570f291474fa7c6f1e47aa66c85 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.165626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbf4d570f291474fa7c6f1e47aa66c85 [ 614.166762] env[61594]: DEBUG nova.scheduler.client.report [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 614.169541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 5261472e8921464e97e3d352eca03dfe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.190182] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5261472e8921464e97e3d352eca03dfe [ 614.191135] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.191730] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 614.193444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 8caf55187d1a40748f4bc8d3f75ebbff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.244761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8caf55187d1a40748f4bc8d3f75ebbff [ 614.249149] env[61594]: DEBUG nova.compute.utils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 614.249772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 1cce8d8f52a44d27a52717052dd08adb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.255143] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 614.255143] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 614.255143] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.255311] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 614.255311] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.255311] env[61594]: DEBUG nova.virt.hardware [None 
req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 614.255311] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 614.255311] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 614.255463] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 614.255463] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 614.255463] env[61594]: DEBUG nova.virt.hardware [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.255686] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 614.256920] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c0b57d-4487-412b-bc8b-4160c4546427 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.265657] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd90fed2-8ace-462d-a562-a0978c9c4c50 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.270331] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cce8d8f52a44d27a52717052dd08adb [ 614.271113] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 614.272605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 08691715210941cbbc8a78ff37bf11cf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.287288] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f25173-242e-4e27-aee1-2fff81bc8a5a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.319497] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08691715210941cbbc8a78ff37bf11cf [ 614.322385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 78e53aeb9a1c4965b3ff131bb8d8761b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 614.359722] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78e53aeb9a1c4965b3ff131bb8d8761b [ 614.361750] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 614.393035] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 614.393254] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 614.393411] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.393582] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 614.393729] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.393957] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 614.394175] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 614.394338] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 614.394679] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 614.394679] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 614.394836] env[61594]: DEBUG nova.virt.hardware [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.396013] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a50cfa-0227-4d10-a228-b88dbb152d88 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.406009] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52a6458-c206-46e1-89a3-ebb578a6006e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.425123] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.435023] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating 
folder: OpenStack. Parent ref: group-v4. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.435751] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-904deb5b-2283-4e17-81ab-2d57d184d95d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.449869] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Created folder: OpenStack in parent group-v4. [ 614.450459] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating folder: Project (82a8caeb29874fcd98cafe0ded9fe9b1). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.453115] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca2bcd70-ea75-42a5-baf8-292ee5fb7059 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.460905] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Created folder: Project (82a8caeb29874fcd98cafe0ded9fe9b1) in parent group-v277030. [ 614.462029] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating folder: Instances. Parent ref: group-v277031. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.462029] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81d0f389-1dba-4ab6-a12b-0860ef7d8ffb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.472520] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Created folder: Instances in parent group-v277031. [ 614.472702] env[61594]: DEBUG oslo.service.loopingcall [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.472887] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 614.474299] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-19aefe0d-d3a5-4bf4-a98b-60d5c3849ec0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.491353] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.491353] env[61594]: value = "task-1291376" [ 614.491353] env[61594]: _type = "Task" [ 614.491353] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.500543] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291376, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.545170] env[61594]: DEBUG nova.policy [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26298f53a4904c86a795c9a1717500e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36fff95014d4d868f65dafefe1958fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 615.009822] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291376, 'name': CreateVM_Task, 'duration_secs': 0.337227} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.009822] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 615.010561] env[61594]: DEBUG oslo_vmware.service [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cc4814-4cfd-4b6c-beda-163ec0a90e97 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.018638] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.018638] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.018638] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 615.019116] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db69c9b8-5066-473e-bda7-675c9211799d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.023848] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 
tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for the task: (returnval){ [ 615.023848] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52e72c65-e290-1b6b-d47c-ae744c85b799" [ 615.023848] env[61594]: _type = "Task" [ 615.023848] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.033170] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52e72c65-e290-1b6b-d47c-ae744c85b799, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.538537] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 615.539557] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 615.539557] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 615.539557] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.540083] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 615.540330] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-826a88ac-311f-408c-9f0b-2f24b6383b97 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.561592] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 615.562771] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 615.562771] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0dc920-511d-4e18-a73f-0348d3661da9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.572889] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a04cba-cd04-4de8-9dc3-08bf5bea0c28 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.581627] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for the task: (returnval){ [ 615.581627] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]523fcbe6-adfd-320e-4b4d-2f7a68f3da81" [ 615.581627] env[61594]: _type = "Task" [ 615.581627] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.592610] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]523fcbe6-adfd-320e-4b4d-2f7a68f3da81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.096644] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 616.096644] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating directory with path [datastore1] vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 616.096644] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-656a6b0a-5889-45ef-aa21-bee7a3af273f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.119250] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Created directory with path [datastore1] vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 616.119431] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Fetch image to [datastore1] 
vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 616.119616] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 616.120438] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6ce598-62fe-4664-af7e-0535fb7c587e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.128817] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377668db-ce78-4d0d-a19b-09ffc3570591 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.139496] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cdfd6e1-4b08-4e7c-bba5-0454450cfe1f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.175324] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f89f38-334a-47d7-9f8a-8daf38934b34 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.181849] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5fabdaf1-ec92-4a91-bb3d-80a17978ab8d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.219161] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 616.310630] env[61594]: DEBUG oslo_vmware.rw_handles [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 616.381778] env[61594]: DEBUG oslo_vmware.rw_handles [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Completed reading data from the image iterator. 
{{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 616.381778] env[61594]: DEBUG oslo_vmware.rw_handles [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 616.638116] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Successfully created port: 2b013813-1b56-4582-98f4-af17c7453ad5 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.073406] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "fd68bfa8-62af-490d-bfad-5ef59a733336" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.073714] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "fd68bfa8-62af-490d-bfad-5ef59a733336" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.074394] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg e2feb7819f704ce28afc23ee8c6131c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.100183] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2feb7819f704ce28afc23ee8c6131c3 [ 619.100183] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 619.103671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg a0021b47a85f4f0a9015a4c7b05e2a2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.172437] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0021b47a85f4f0a9015a4c7b05e2a2a [ 619.205807] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.205807] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.208144] env[61594]: INFO nova.compute.claims [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.212853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg b30a465986ec4548a20c9a3742db8248 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.314251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b30a465986ec4548a20c9a3742db8248 [ 619.317644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 6da7aaabfb284a49a6dc88a1911e18db in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.338314] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6da7aaabfb284a49a6dc88a1911e18db [ 619.451938] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc0cbb0-6bc3-4034-885e-702e54860938 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.463828] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f0b333-c493-43c2-ba8c-24e7e7dd17e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.504175] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7f339d-1aaa-4032-a045-b6c284594082 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.518296] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-85429ff2-9d7c-4cac-bb84-4f4e1e1a369e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.542928] env[61594]: DEBUG nova.compute.provider_tree [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.542928] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 871ebd104c84478bbc653c57463d6d01 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.557230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 871ebd104c84478bbc653c57463d6d01 [ 619.558653] env[61594]: DEBUG nova.scheduler.client.report [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 619.560990] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg b6d6aa7fbf0443f5a580f7f8b88e1dbf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.585719] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6d6aa7fbf0443f5a580f7f8b88e1dbf [ 619.587126] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.383s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.587932] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 619.589873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 3b0307b7b7924d9fbb4cc7d213039794 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.668291] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b0307b7b7924d9fbb4cc7d213039794 [ 619.670704] env[61594]: DEBUG nova.compute.utils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.677508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 7b4ea2bf86164abea02315240fe53716 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.677775] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 619.678895] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.704431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b4ea2bf86164abea02315240fe53716 [ 619.704431] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 619.706248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg e9940db96c39442b87c3ca21562794d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.750688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9940db96c39442b87c3ca21562794d8 [ 619.756281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 5cd2d1189348421c9e01fd374f5566df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 619.805800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cd2d1189348421c9e01fd374f5566df [ 619.808741] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 619.849760] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 619.850088] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 619.850186] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.850435] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 619.850531] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 
tempest-ListServerFiltersTestJSON-89530523-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.850685] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 619.850890] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 619.851102] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 619.851297] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 619.851461] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 619.851632] env[61594]: DEBUG nova.virt.hardware [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.852844] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a7346d-f3d3-44f4-a6c7-573df3bc4b68 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.868693] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f0cea2-484e-4d2d-872d-26bff3693ee5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.950563] env[61594]: DEBUG nova.policy [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34cf0f0eff664be3be9da8676c5bd742', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d235e06cce044529d4f982bf37af3e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 621.546605] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Successfully created port: c28c4885-b80c-41e0-a036-b309a3352a9a {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.537460] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.537460] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.537592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 43b6b2a5b9614b2996f7ca879f8971a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.555084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43b6b2a5b9614b2996f7ca879f8971a7 [ 622.557357] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 622.557357] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg f67fc4e7e37f453ba7594fc835ed11e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.599161] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f67fc4e7e37f453ba7594fc835ed11e4 [ 622.624182] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.624182] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.624182] env[61594]: INFO nova.compute.claims [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.626629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 7594aeeadacc4b958478f7c5bd8de8a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.642237] env[61594]: ERROR nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. 
[ 622.642237] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 622.642237] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.642237] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.642237] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.642237] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.642237] env[61594]: ERROR nova.compute.manager raise self.value [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.642237] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.642237] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.642237] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.642703] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.642703] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.642703] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. 
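(Editor's note, not part of the captured log.) The traceback above names the exact spot the failure comes from: `_update_port()` calls `_ensure_no_port_binding_failure(port)` in `nova/network/neutron.py`, which raises `PortBindingFailed` for port 2b013813-1b56-4582-98f4-af17c7453ad5. Below is a minimal, self-contained sketch of the kind of check that function performs. It is an illustrative reconstruction, not Nova's actual source: the local `PortBindingFailed` class and the fake port dict are stand-ins, and the `binding_failed` vif-type sentinel is assumed here as the trigger condition, since the log shows only the raise, not the test that precedes it.

    # Sketch only: mirrors the check the traceback points at, using local stand-ins
    # so it runs without Nova or Neutron installed.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel for a failed binding


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port):
        """Raise if the port dict reports its VIF binding as failed."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        # A port shaped like what Neutron might return when no mechanism driver
        # could bind it on the target host (hypothetical example data).
        port = {'id': '2b013813-1b56-4582-98f4-af17c7453ad5',
                'binding:vif_type': VIF_TYPE_BINDING_FAILED}
        try:
            ensure_no_port_binding_failure(port)
        except PortBindingFailed as e:
            print(e)  # same message text as the exception logged above

Once raised, the log shows the exception propagating out of `_allocate_network_async` into the spawn path for instance ef6e717f-c8b2-4536-8425-89f218d49f1a, after which the compute manager terminates the instance and tears down its (empty) network info cache, as the entries that follow record.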
[ 622.642703] env[61594]: ERROR nova.compute.manager [ 622.642703] env[61594]: Traceback (most recent call last): [ 622.642703] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.642703] env[61594]: listener.cb(fileno) [ 622.642703] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 622.642703] env[61594]: result = function(*args, **kwargs) [ 622.642703] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.642703] env[61594]: return func(*args, **kwargs) [ 622.642703] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 622.642703] env[61594]: raise e [ 622.642703] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 622.642703] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 622.642703] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.642703] env[61594]: created_port_ids = self._update_ports_for_instance( [ 622.642703] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.642703] env[61594]: with excutils.save_and_reraise_exception(): [ 622.642703] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.642703] env[61594]: self.force_reraise() [ 622.642703] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.642703] env[61594]: raise self.value [ 622.642703] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.642703] env[61594]: updated_port = self._update_port( [ 622.642703] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.642703] env[61594]: _ensure_no_port_binding_failure(port) [ 622.642703] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.642703] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.643462] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. [ 622.643462] env[61594]: Removing descriptor: 17 [ 622.644644] env[61594]: ERROR nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. 
[ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Traceback (most recent call last): [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] yield resources [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.driver.spawn(context, instance, image_meta, [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] vm_ref = self.build_virtual_machine(instance, [ 622.644644] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] for vif in network_info: [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self._sync_wrapper(fn, *args, **kwargs) [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.wait() [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self[:] = self._gt.wait() [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self._exit_event.wait() [ 622.644964] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 622.644964] env[61594]: ERROR 
nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] result = hub.switch() [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self.greenlet.switch() [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] result = function(*args, **kwargs) [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return func(*args, **kwargs) [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise e [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] nwinfo = self.network_api.allocate_for_instance( [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] created_port_ids = self._update_ports_for_instance( [ 622.645361] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] with excutils.save_and_reraise_exception(): [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.force_reraise() [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise self.value [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] updated_port = self._update_port( [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.645745] 
env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] _ensure_no_port_binding_failure(port) [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise exception.PortBindingFailed(port_id=port['id']) [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. [ 622.645745] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] [ 622.646227] env[61594]: INFO nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Terminating instance [ 622.648131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.648449] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquired lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.648686] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 622.649263] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 2e6671d30bcd41e9ba00d40498954e2c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.659390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e6671d30bcd41e9ba00d40498954e2c [ 622.685794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7594aeeadacc4b958478f7c5bd8de8a5 [ 622.687671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg a26ce6505bd34451a1a43a34675e6002 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.697772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a26ce6505bd34451a1a43a34675e6002 [ 622.716437] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance 
cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.802293] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab4ea6f-7c08-4fd2-b163-c53f1adbf107 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.810342] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21dc7bd-20b5-45ae-9daa-f66f9e0147c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.854270] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c3130a2-44be-433a-bd66-d313944be8af {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.862068] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573d17b9-ff66-475a-bda3-007988b18e85 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.875891] env[61594]: DEBUG nova.compute.provider_tree [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.876498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg ca2d07dc41724685a8146684396f4634 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.889979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca2d07dc41724685a8146684396f4634 [ 622.891555] env[61594]: DEBUG nova.scheduler.client.report [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 622.893278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 50c4208ec4254f22a22eae0c490f20d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.908983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50c4208ec4254f22a22eae0c490f20d3 [ 622.909883] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.287s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 622.910634] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 622.915020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg d9649dce3dd04f9c93895c967ddf631b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.919012] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.919507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg ca5fdc8fb3d846d0b870a08d606dffe1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.934830] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca5fdc8fb3d846d0b870a08d606dffe1 [ 622.935346] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Releasing lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.935857] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 622.935970] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 622.937271] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ee52168-6a90-4a4c-b380-f80c6f9b9db8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.949323] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91dece4b-67f3-473d-b42e-0c454be99e50 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.964244] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9649dce3dd04f9c93895c967ddf631b [ 622.968017] env[61594]: DEBUG nova.compute.utils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 622.968017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg bf94e06470cb48ff9a69401281561cbf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 622.968017] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 622.968017] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 622.975187] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ef6e717f-c8b2-4536-8425-89f218d49f1a could not be found. [ 622.975420] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 622.975836] env[61594]: INFO nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Took 0.04 seconds to destroy the instance on the hypervisor. 
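The "Acquiring lock" / "Acquired lock" / "Releasing lock" triplets around "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" above are oslo.concurrency named locks guarding the instance network-info cache refresh. A minimal usage sketch, assuming only that oslo.concurrency is installed; the function name and body are illustrative stand-ins, not Nova's code:

from oslo_concurrency import lockutils

INSTANCE_UUID = "ef6e717f-c8b2-4536-8425-89f218d49f1a"  # uuid taken from the log above

def refresh_instance_network_cache(instance_uuid):
    # lockutils.lock() returns a context manager; acquiring and releasing it
    # produces DEBUG lines like the lockutils.py ones in this log. The lock is
    # in-process by default; external=True would serialize via a lock file.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # ... re-query Neutron and rewrite the instance_info_cache here ...
        return []

refresh_instance_network_cache(INSTANCE_UUID)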
[ 622.976127] env[61594]: DEBUG oslo.service.loopingcall [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.976630] env[61594]: DEBUG nova.compute.manager [-] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 622.976720] env[61594]: DEBUG nova.network.neutron [-] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 622.981462] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf94e06470cb48ff9a69401281561cbf [ 622.982041] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 622.983727] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 4882f0f52c0649d89f96e96b391314f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.035232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4882f0f52c0649d89f96e96b391314f9 [ 623.038628] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 2f394495bb96414c888492ae7148f594 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.085333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f394495bb96414c888492ae7148f594 [ 623.085937] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 623.128578] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.128854] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.129028] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.129232] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.129510] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.129738] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.130325] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.130554] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.130751] env[61594]: DEBUG 
nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.130935] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.131094] env[61594]: DEBUG nova.virt.hardware [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.131973] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71c398c5-8adb-4e88-8950-90784c263587 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.143024] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a0bc2a-a156-4b7e-ae5a-f59ef51c43d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.189578] env[61594]: DEBUG nova.network.neutron [-] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.189578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e123ef004d214089a0bd943d419136e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.202874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e123ef004d214089a0bd943d419136e3 [ 623.203354] env[61594]: DEBUG nova.network.neutron [-] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.203774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7c3b8d042def49e282e550fc1fe84146 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.227179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c3b8d042def49e282e550fc1fe84146 [ 623.227719] env[61594]: INFO nova.compute.manager [-] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Took 0.25 seconds to deallocate network for instance. 
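The nova.virt.hardware lines above go from unconstrained flavor/image limits (0:0:0) to a single candidate topology of 1 socket x 1 core x 1 thread for the 1-vCPU m1.nano flavor. A rough standalone illustration of that enumeration step, deliberately simplified and not the actual nova.virt.hardware implementation:

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Yield every (sockets, cores, threads) triple whose product equals the
    # vCPU count and that stays within the per-dimension limits.
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

# With vcpus=1 and no limits the only candidate is (1, 1, 1), matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" above.
print(list(possible_topologies(1)))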
[ 623.230291] env[61594]: DEBUG nova.compute.claims [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 623.231111] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.231351] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.234251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 467ca0763ba84093ab40f9bebd052e95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.239548] env[61594]: DEBUG nova.policy [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34cf0f0eff664be3be9da8676c5bd742', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d235e06cce044529d4f982bf37af3e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 623.280422] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 467ca0763ba84093ab40f9bebd052e95 [ 623.385246] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6119043-0298-4aa2-bc49-11d4abdab1f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.392133] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064a767f-4264-46c1-9ead-02a54c80a803 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.431710] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54dd5bd-c151-4b8f-b6f2-655d5dc42261 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.440236] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a1d141-b208-4f75-9960-ab63f2c08e99 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.455795] env[61594]: DEBUG nova.compute.provider_tree [None 
req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.456428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 2acc734d2d714045b37b602858e7d8a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.466246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2acc734d2d714045b37b602858e7d8a0 [ 623.467232] env[61594]: DEBUG nova.scheduler.client.report [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 623.469910] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 0b8314f00ff74c84bf8328505de149d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.491280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b8314f00ff74c84bf8328505de149d5 [ 623.496020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.261s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 623.496020] env[61594]: ERROR nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. 
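The inventory dict logged for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be encodes per-resource-class capacity. Assuming the usual placement arithmetic, usable capacity is (total - reserved) * allocation_ratio and max_unit caps any single allocation; a short worked example with the numbers from the log:

inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 139},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}, single allocation capped at {inv['max_unit']}")

# VCPU: 192 schedulable units, MEMORY_MB: 196078, DISK_GB: 400.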
[ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Traceback (most recent call last): [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.driver.spawn(context, instance, image_meta, [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 623.496020] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] vm_ref = self.build_virtual_machine(instance, [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] vif_infos = vmwarevif.get_vif_info(self._session, [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] for vif in network_info: [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self._sync_wrapper(fn, *args, **kwargs) [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.wait() [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self[:] = self._gt.wait() [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self._exit_event.wait() [ 623.496425] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] result = hub.switch() [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return self.greenlet.switch() [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] result = function(*args, **kwargs) [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] return func(*args, **kwargs) [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise e [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] nwinfo = self.network_api.allocate_for_instance( [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 623.496829] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] created_port_ids = self._update_ports_for_instance( [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] with excutils.save_and_reraise_exception(): [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] self.force_reraise() [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise self.value [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] updated_port = self._update_port( [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] _ensure_no_port_binding_failure(port) [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 623.497192] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] raise exception.PortBindingFailed(port_id=port['id']) [ 623.497486] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] nova.exception.PortBindingFailed: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. [ 623.497486] env[61594]: ERROR nova.compute.manager [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] [ 623.497486] env[61594]: DEBUG nova.compute.utils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 623.501404] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Build of instance ef6e717f-c8b2-4536-8425-89f218d49f1a was re-scheduled: Binding failed for port 2b013813-1b56-4582-98f4-af17c7453ad5, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 623.505317] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 623.505317] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.505317] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquired lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.505317] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.505454] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 14f8c83b6c08484c841f3b213f22cc73 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.511733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14f8c83b6c08484c841f3b213f22cc73 [ 623.540789] env[61594]: DEBUG nova.network.neutron [None 
req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.688408] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.688958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg fe65a73229ef44db9765210b5587f61d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.702618] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe65a73229ef44db9765210b5587f61d [ 623.702828] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Releasing lock "refresh_cache-ef6e717f-c8b2-4536-8425-89f218d49f1a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.702985] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 623.703147] env[61594]: DEBUG nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 623.703316] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.733220] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.734102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 41ac46336c9f4d1a83d118a42bd59957 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.745705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41ac46336c9f4d1a83d118a42bd59957 [ 623.746733] env[61594]: DEBUG nova.network.neutron [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.747253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 4da05ae6fdc0409d97709d7db25300c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.758036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4da05ae6fdc0409d97709d7db25300c3 [ 623.758036] env[61594]: INFO nova.compute.manager [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: ef6e717f-c8b2-4536-8425-89f218d49f1a] Took 0.05 seconds to deallocate network for instance. [ 623.758036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg b7bee07eb0564d5ba26bb8d32572f87d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.793413] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7bee07eb0564d5ba26bb8d32572f87d [ 623.795930] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 4ea45ba34d7c4b7e96b3bc933610f479 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.843035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ea45ba34d7c4b7e96b3bc933610f479 [ 623.852510] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Successfully created port: 55260f69-7e9b-4733-95ed-eef39c8b95bf {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.865337] env[61594]: INFO nova.scheduler.client.report [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Deleted allocations for instance ef6e717f-c8b2-4536-8425-89f218d49f1a [ 623.874383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 
bde5931831dc48a49bc6865a4bdde539 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 623.893359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bde5931831dc48a49bc6865a4bdde539 [ 623.893359] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4584c8c-7c17-423e-bd49-7eab8ace72e3 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "ef6e717f-c8b2-4536-8425-89f218d49f1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.464s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.120124] env[61594]: ERROR nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. [ 625.120124] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 625.120124] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.120124] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.120124] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.120124] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.120124] env[61594]: ERROR nova.compute.manager raise self.value [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.120124] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 625.120124] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.120124] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 625.121770] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.121770] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 625.121770] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. 
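Every PortBindingFailed traceback in this log bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure(port). A standalone re-creation of that style of check; the exception class and the VIF_TYPE_BINDING_FAILED constant are local stand-ins rather than imports from Nova:

VIF_TYPE_BINDING_FAILED = "binding_failed"  # value Neutron reports when binding fails

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Neutron exposes the binding outcome on the port as 'binding:vif_type';
    # only an explicit 'binding_failed' value is treated as a hard failure.
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

ensure_no_port_binding_failure(
    {"id": "2b013813-1b56-4582-98f4-af17c7453ad5",
     "binding:vif_type": "binding_failed"})  # raises, mirroring the errors above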
[ 625.121770] env[61594]: ERROR nova.compute.manager [ 625.121770] env[61594]: Traceback (most recent call last): [ 625.121770] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 625.121770] env[61594]: listener.cb(fileno) [ 625.121770] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 625.121770] env[61594]: result = function(*args, **kwargs) [ 625.121770] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.121770] env[61594]: return func(*args, **kwargs) [ 625.121770] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 625.121770] env[61594]: raise e [ 625.121770] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 625.121770] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 625.121770] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.121770] env[61594]: created_port_ids = self._update_ports_for_instance( [ 625.121770] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.121770] env[61594]: with excutils.save_and_reraise_exception(): [ 625.121770] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.121770] env[61594]: self.force_reraise() [ 625.121770] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.121770] env[61594]: raise self.value [ 625.121770] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.121770] env[61594]: updated_port = self._update_port( [ 625.121770] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.121770] env[61594]: _ensure_no_port_binding_failure(port) [ 625.121770] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.121770] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 625.122854] env[61594]: nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. [ 625.122854] env[61594]: Removing descriptor: 19 [ 625.122854] env[61594]: ERROR nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. 
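The oslo_utils/excutils.py frames (__exit__, force_reraise, raise self.value) in these tracebacks come from the save_and_reraise_exception() context manager, which runs cleanup inside an except block and then re-raises the original error. A minimal usage sketch, assuming oslo.utils is installed; the function names are illustrative:

from oslo_utils import excutils

def update_port_or_rollback(update_port, rollback):
    try:
        return update_port()
    except Exception:
        # If rollback() completes, the original exception is re-raised when the
        # with-block exits (the force_reraise()/raise self.value frames above).
        # If rollback() itself raises, oslo.utils logs the original exception
        # and lets the new one propagate instead.
        with excutils.save_and_reraise_exception():
            rollback()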
[ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Traceback (most recent call last): [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] yield resources [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.driver.spawn(context, instance, image_meta, [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self._vmops.spawn(context, instance, image_meta, injected_files, [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 625.122854] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] vm_ref = self.build_virtual_machine(instance, [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] vif_infos = vmwarevif.get_vif_info(self._session, [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] for vif in network_info: [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self._sync_wrapper(fn, *args, **kwargs) [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.wait() [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self[:] = self._gt.wait() [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self._exit_event.wait() [ 625.123182] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 625.123531] env[61594]: ERROR 
nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] result = hub.switch() [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self.greenlet.switch() [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] result = function(*args, **kwargs) [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return func(*args, **kwargs) [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise e [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] nwinfo = self.network_api.allocate_for_instance( [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 625.123531] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] created_port_ids = self._update_ports_for_instance( [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] with excutils.save_and_reraise_exception(): [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.force_reraise() [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise self.value [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] updated_port = self._update_port( [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 625.123835] 
env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] _ensure_no_port_binding_failure(port) [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 625.123835] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise exception.PortBindingFailed(port_id=port['id']) [ 625.124130] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. [ 625.124130] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] [ 625.124130] env[61594]: INFO nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Terminating instance [ 625.124130] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 625.124130] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.124130] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 625.126839] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg c585b33e8cdc4484bb88610038d15f7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.138975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c585b33e8cdc4484bb88610038d15f7e [ 625.187159] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.557566] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.557888] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.558085] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 625.558207] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 625.558802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg bc38754f217f423f8c84e1a3c7c64567 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.574819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc38754f217f423f8c84e1a3c7c64567 [ 625.576141] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 625.576284] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 625.576425] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 625.576762] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 625.577410] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.577410] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.577515] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.577622] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.577803] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.577986] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.579075] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 625.579785] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 625.580278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ecf6e79f05cd4d4dabe5066942a550ca in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.592370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecf6e79f05cd4d4dabe5066942a550ca [ 625.593334] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.593637] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.593781] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.593851] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 625.595023] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1872280f-0997-415b-b314-88c4c2952bdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.610040] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41fa685b-e94e-4c8f-9f8a-f542095348d8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.626109] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dc6034-5258-4e9f-a1fb-f2840c4eb6c5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.638016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93d38d5c-9db7-40a3-92b8-e2778d90e70b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.672188] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181532MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 625.672469] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.675112] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.675112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg bb5969d3f5d742c5b17366e56b81d518 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.707722] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb5969d3f5d742c5b17366e56b81d518 [ 625.711333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg e2b0045baeab41c6afd258c3fc33092e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.721168] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "151fefe2-b70a-4ea5-8b50-08c7968b10fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.721168] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "151fefe2-b70a-4ea5-8b50-08c7968b10fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.721168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 418c752a983d484ba16c8a7dc13719e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.738770] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 418c752a983d484ba16c8a7dc13719e3 [ 625.738770] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 625.740270] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 08e0c597bf9d40ec98c37f43fa066581 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.748313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2b0045baeab41c6afd258c3fc33092e [ 625.763817] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.764518] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 002e277e36ad454c83022518029c2240 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.778101] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 04fd7039-c2c8-4b78-8c3d-37eb66fe2115 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 625.778101] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance fd68bfa8-62af-490d-bfad-5ef59a733336 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 625.778101] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 625.778101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f3253bf78a2a4a2499616f98da557475 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.781635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 002e277e36ad454c83022518029c2240 [ 625.782742] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 625.782742] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 625.782853] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 625.783378] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-20b830b7-da6a-47be-9ba9-832f2056edbf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.796748] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7178f08b-9a69-451a-b2c0-d58f6b7c7330 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.814157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08e0c597bf9d40ec98c37f43fa066581 [ 625.814157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3253bf78a2a4a2499616f98da557475 [ 625.822095] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 151fefe2-b70a-4ea5-8b50-08c7968b10fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 625.822563] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 625.822563] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 625.840491] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd68bfa8-62af-490d-bfad-5ef59a733336 could not be found. [ 625.840874] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 625.840874] env[61594]: INFO nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Took 0.06 seconds to destroy the instance on the hypervisor. 
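The "Final resource view" figures above follow directly from the per-instance allocations reported a few entries earlier ({'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} for each of the three tracked instances) plus the 512 MB of reserved host memory in the provider inventory. A minimal sketch of that arithmetic, assuming the reservation is counted into used RAM the way the tracker reports it:

```python
# Sketch only: reproduces the numbers in the resource tracker's
# "Final resource view" log line from values visible in this log.
per_instance = {"VCPU": 1, "MEMORY_MB": 128, "DISK_GB": 1}   # allocations per instance
tracked_instances = 3             # 04fd7039..., fd68bfa8..., 73f9bdec...
reserved_memory_mb = 512          # 'reserved' MEMORY_MB in the provider inventory

used_vcpus = tracked_instances * per_instance["VCPU"]                               # 3
used_ram_mb = reserved_memory_mb + tracked_instances * per_instance["MEMORY_MB"]    # 896
used_disk_gb = tracked_instances * per_instance["DISK_GB"]                          # 3

print(used_vcpus, used_ram_mb, used_disk_gb)   # -> 3 896 3, matching used_vcpus/used_ram/used_disk
```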
[ 625.841154] env[61594]: DEBUG oslo.service.loopingcall [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 625.841381] env[61594]: DEBUG nova.compute.manager [-] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 625.841469] env[61594]: DEBUG nova.network.neutron [-] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 625.852578] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.902826] env[61594]: DEBUG nova.network.neutron [-] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.903341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 647488ff40324ee7852107d63fefda41 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.912082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 647488ff40324ee7852107d63fefda41 [ 625.912855] env[61594]: DEBUG nova.network.neutron [-] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.912855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d0818e0434f94e67ac8d4d2775d0af09 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.920316] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91ab941-017f-4422-8c48-d71469c6d142 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.929517] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7736ab-33e7-4618-be73-2e3a6267f0ba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.933823] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0818e0434f94e67ac8d4d2775d0af09 [ 625.933823] env[61594]: INFO nova.compute.manager [-] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Took 0.09 seconds to deallocate network for instance. 
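The PortBindingFailed traceback earlier in this section (and its repeat when the build failure is re-raised below) bottoms out in the same guard: after updating the port in Neutron, Nova inspects the binding result and raises when Neutron reports the binding as failed. The following is only an illustrative sketch of that pattern, with names simplified; it is not the actual nova/network/neutron.py source:

```python
# Illustrative sketch of the port-binding guard the tracebacks end in;
# not the real Nova code, just the shape of the check.
VIF_TYPE_BINDING_FAILED = "binding_failed"

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    """Raise if Neutron reported a failed binding for this port."""
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

# Example: a port whose binding failed on the Neutron side.
ensure_no_port_binding_failure(
    {"id": "c28c4885-b80c-41e0-a036-b309a3352a9a",
     "binding:vif_type": VIF_TYPE_BINDING_FAILED})   # raises PortBindingFailed
```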
[ 625.963115] env[61594]: DEBUG nova.compute.claims [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 625.963305] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.964067] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a06ad79-4ce3-4701-b87a-ec67fb5762ff {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.971852] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd4b887-f7f1-4e28-b718-9767934f0455 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.986533] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.986825] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg fa9fc723fce64aa39f139de6c05ac5d0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 625.999925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa9fc723fce64aa39f139de6c05ac5d0 [ 626.000943] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 626.003500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ab67da0ba1f04eb5b899237ed4777a60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.024867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab67da0ba1f04eb5b899237ed4777a60 [ 626.025714] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 626.025848] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.353s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.026105] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.174s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.028594] env[61594]: INFO nova.compute.claims [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.029784] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 870218e71949459f9b4bbecb5ffc6e87 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.080138] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 870218e71949459f9b4bbecb5ffc6e87 [ 626.084018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg ebf67f2aa6e842da9bdfefc096e1af2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.093030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebf67f2aa6e842da9bdfefc096e1af2a [ 626.197400] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1bec25-9461-4516-a244-0de14f1e8fe2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.209670] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29f1441a-c5b6-492b-ac59-9cc180a86ff6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.255160] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27252ad6-817a-4527-b100-8e5eae2fe837 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.265872] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-587cb799-8f2c-4a08-8096-f7ed52816e65 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.283906] env[61594]: DEBUG nova.compute.provider_tree [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.284083] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg dd09e3f6884341778d54f6995e44f984 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.294260] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd09e3f6884341778d54f6995e44f984 [ 626.294581] env[61594]: DEBUG nova.scheduler.client.report [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 626.299279] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 3c92e1ca12024eadae98a2f27596438d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.336625] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c92e1ca12024eadae98a2f27596438d [ 626.336625] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.336776] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 626.339771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 02b3622fbf644d238bb8b302ef2627d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.340782] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.377s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.342618] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg d98f45abf1c24ca8bcada6a54fd4345d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.416990] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d98f45abf1c24ca8bcada6a54fd4345d [ 626.424848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02b3622fbf644d238bb8b302ef2627d7 [ 626.425915] env[61594]: DEBUG nova.compute.utils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 626.426650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 7b08e407cbeb48398f3c5ada2fe45edd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.430819] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 626.473644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b08e407cbeb48398f3c5ada2fe45edd [ 626.474386] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 626.476190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg d5aa7d94cbaf43b4b06c333e3155ee65 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.532468] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5aa7d94cbaf43b4b06c333e3155ee65 [ 626.534169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg b8e639d9e6cd4c75b582ef1308bc74ad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.537065] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdaf5d1-d6b3-42c7-aef7-3ce83973533d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.547321] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc95a6b-c155-4cae-802c-d58f7ca020dd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.584023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8e639d9e6cd4c75b582ef1308bc74ad [ 626.586327] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 626.587872] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763edd75-274a-42f5-9c45-ecb94a4d560b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.596174] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdc3051-7eae-4516-bb7f-f27662a557ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.613321] env[61594]: DEBUG nova.compute.provider_tree [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.613905] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg a79b43f74a284a18bc97a44e1004eef0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.625415] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 626.625647] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 626.625800] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 626.626757] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 626.626757] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 
tempest-ServerDiagnosticsV248Test-2019202710-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 626.626757] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 626.626757] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 626.626757] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 626.626916] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 626.626916] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 626.627424] env[61594]: DEBUG nova.virt.hardware [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 626.628692] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df382d0-e845-4d48-88d8-33d0192a220c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.632105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a79b43f74a284a18bc97a44e1004eef0 [ 626.634274] env[61594]: DEBUG nova.scheduler.client.report [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 626.636573] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg f06a66fa0b644722add4754220984359 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.646593] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aa68bd-41ba-48bf-8329-2bc56908ce79 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.663906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f06a66fa0b644722add4754220984359 [ 626.664519] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.671225] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Creating folder: Project (889dea457e904a0c84b6837db10948a8). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 626.672351] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.331s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.673611] env[61594]: ERROR nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. 
[ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Traceback (most recent call last): [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.driver.spawn(context, instance, image_meta, [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] vm_ref = self.build_virtual_machine(instance, [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.673611] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] for vif in network_info: [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self._sync_wrapper(fn, *args, **kwargs) [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.wait() [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self[:] = self._gt.wait() [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self._exit_event.wait() [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] result = hub.switch() [ 626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
626.673946] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return self.greenlet.switch() [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] result = function(*args, **kwargs) [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] return func(*args, **kwargs) [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise e [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] nwinfo = self.network_api.allocate_for_instance( [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] created_port_ids = self._update_ports_for_instance( [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] with excutils.save_and_reraise_exception(): [ 626.674325] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] self.force_reraise() [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise self.value [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] updated_port = self._update_port( [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] _ensure_no_port_binding_failure(port) [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] raise exception.PortBindingFailed(port_id=port['id']) [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] nova.exception.PortBindingFailed: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. [ 626.674739] env[61594]: ERROR nova.compute.manager [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] [ 626.675077] env[61594]: DEBUG nova.compute.utils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 626.675077] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-07458a1f-4bc0-41b1-8a82-d3e0af696625 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.677837] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Build of instance fd68bfa8-62af-490d-bfad-5ef59a733336 was re-scheduled: Binding failed for port c28c4885-b80c-41e0-a036-b309a3352a9a, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 626.678448] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 626.678448] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.678605] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.679085] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.679085] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg f693cb45d22945a3a9a6685bb824a653 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 626.688071] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f693cb45d22945a3a9a6685bb824a653 [ 626.692842] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Created folder: Project (889dea457e904a0c84b6837db10948a8) in parent group-v277030. [ 626.693075] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Creating folder: Instances. Parent ref: group-v277034. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 626.693656] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6605ca87-d226-4019-948c-8c578eec06be {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.705451] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Created folder: Instances in parent group-v277034. [ 626.705535] env[61594]: DEBUG oslo.service.loopingcall [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 626.705690] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 626.705892] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a930589e-18ce-480b-b2f3-07c96d95ae11 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.725368] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.725368] env[61594]: value = "task-1291379" [ 626.725368] env[61594]: _type = "Task" [ 626.725368] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.732616] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291379, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.889610] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.170048] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.170613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 41742f17dae04627913c174febaf59a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.188838] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41742f17dae04627913c174febaf59a1 [ 627.189978] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-fd68bfa8-62af-490d-bfad-5ef59a733336" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.191557] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 627.191557] env[61594]: DEBUG nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 627.191557] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 627.236768] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291379, 'name': CreateVM_Task, 'duration_secs': 0.313008} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.241525] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.241644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 2d2c118c68ef4523bad1edbf1911d0ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.242348] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 627.242978] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.243187] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.243459] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 627.243949] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1c8884-5d4d-4a76-8f2b-2240e9c6c6fc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.249733] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Waiting for the task: (returnval){ [ 627.249733] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52d1ed7b-b14f-7d1c-52a3-d8e3f07fd498" [ 627.249733] env[61594]: _type = "Task" [ 627.249733] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.254301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d2c118c68ef4523bad1edbf1911d0ed [ 627.254833] env[61594]: DEBUG nova.network.neutron [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.255332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 33b79d12edd1457d8f43680cd166680e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.260593] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52d1ed7b-b14f-7d1c-52a3-d8e3f07fd498, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.268439] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33b79d12edd1457d8f43680cd166680e [ 627.268439] env[61594]: INFO nova.compute.manager [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: fd68bfa8-62af-490d-bfad-5ef59a733336] Took 0.08 seconds to deallocate network for instance. 
[ 627.270167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 2e1d4401bc3e4037a0c16e1319f64a35 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.342733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e1d4401bc3e4037a0c16e1319f64a35 [ 627.347799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 329e14f1dfe14f66841f4dc8de434358 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.383184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 329e14f1dfe14f66841f4dc8de434358 [ 627.417731] env[61594]: INFO nova.scheduler.client.report [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Deleted allocations for instance fd68bfa8-62af-490d-bfad-5ef59a733336 [ 627.425272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg bc84f03d1e694ebe8b8e0d4d0a601f79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.447819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc84f03d1e694ebe8b8e0d4d0a601f79 [ 627.450086] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37842a60-c148-4917-be75-8a9a7135040a tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "fd68bfa8-62af-490d-bfad-5ef59a733336" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.376s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.616515] env[61594]: ERROR nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. 
[ 627.616515] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 627.616515] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.616515] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.616515] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.616515] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.616515] env[61594]: ERROR nova.compute.manager raise self.value [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.616515] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 627.616515] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.616515] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 627.617210] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.617210] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 627.617210] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. 
[ 627.617210] env[61594]: ERROR nova.compute.manager [ 627.617210] env[61594]: Traceback (most recent call last): [ 627.617210] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 627.617210] env[61594]: listener.cb(fileno) [ 627.617210] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 627.617210] env[61594]: result = function(*args, **kwargs) [ 627.617210] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 627.617210] env[61594]: return func(*args, **kwargs) [ 627.617210] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 627.617210] env[61594]: raise e [ 627.617210] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 627.617210] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 627.617210] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.617210] env[61594]: created_port_ids = self._update_ports_for_instance( [ 627.617210] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.617210] env[61594]: with excutils.save_and_reraise_exception(): [ 627.617210] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.617210] env[61594]: self.force_reraise() [ 627.617210] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.617210] env[61594]: raise self.value [ 627.617210] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.617210] env[61594]: updated_port = self._update_port( [ 627.617210] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.617210] env[61594]: _ensure_no_port_binding_failure(port) [ 627.617210] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.617210] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 627.618115] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. [ 627.618115] env[61594]: Removing descriptor: 17 [ 627.618115] env[61594]: ERROR nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. 
[ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Traceback (most recent call last): [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] yield resources [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.driver.spawn(context, instance, image_meta, [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 627.618115] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] vm_ref = self.build_virtual_machine(instance, [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] vif_infos = vmwarevif.get_vif_info(self._session, [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] for vif in network_info: [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self._sync_wrapper(fn, *args, **kwargs) [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.wait() [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self[:] = self._gt.wait() [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self._exit_event.wait() [ 627.618738] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 627.619064] env[61594]: ERROR 
nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] result = hub.switch() [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self.greenlet.switch() [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] result = function(*args, **kwargs) [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return func(*args, **kwargs) [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise e [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] nwinfo = self.network_api.allocate_for_instance( [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 627.619064] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] created_port_ids = self._update_ports_for_instance( [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] with excutils.save_and_reraise_exception(): [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.force_reraise() [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise self.value [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] updated_port = self._update_port( [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 627.619432] 
env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] _ensure_no_port_binding_failure(port) [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 627.619432] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise exception.PortBindingFailed(port_id=port['id']) [ 627.619746] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. [ 627.619746] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] [ 627.619746] env[61594]: INFO nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Terminating instance [ 627.623594] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.624020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.624020] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 627.624388] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 0b170585c3e14a848b09629f9dfa36f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.635091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b170585c3e14a848b09629f9dfa36f5 [ 627.684531] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.761588] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.761588] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 627.761588] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.955070] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.955226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 60cc05e82fce4870a2384c22119a6034 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 627.965976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60cc05e82fce4870a2384c22119a6034 [ 627.966669] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.967266] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 627.967527] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 627.968081] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88a9c2e8-1461-4b08-a821-42c17cbe65c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.979561] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a049e4f-4b43-4287-9e2e-d2c2db2bc1a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.007247] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd could not be found. [ 628.007247] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 628.007247] env[61594]: INFO nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Took 0.04 seconds to destroy the instance on the hypervisor. [ 628.010972] env[61594]: DEBUG oslo.service.loopingcall [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.010972] env[61594]: DEBUG nova.compute.manager [-] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 628.010972] env[61594]: DEBUG nova.network.neutron [-] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 628.049921] env[61594]: DEBUG nova.network.neutron [-] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.049921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2febf203ec7648dab77e7d1388814939 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.062346] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2febf203ec7648dab77e7d1388814939 [ 628.064195] env[61594]: DEBUG nova.network.neutron [-] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.064195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d9dc8e2319604ec6af51186850982cc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.076741] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9dc8e2319604ec6af51186850982cc4 [ 628.079488] env[61594]: INFO nova.compute.manager [-] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Took 0.07 seconds to deallocate network for instance. [ 628.083709] env[61594]: DEBUG nova.compute.claims [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 628.084121] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.084469] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.086797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 493ea40f3bcb4b74a22958b8a4594eed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.141904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 493ea40f3bcb4b74a22958b8a4594eed [ 628.230474] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d735465-d2be-4750-9069-9d3ddcfa07d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.240571] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52368b9c-42db-40be-b47f-8b9049c354bd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.279677] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf677485-dfca-4f88-babb-65e178a455ee {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.291952] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7999e48-f1f8-487d-8f5d-529a6c9c107c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.306889] env[61594]: DEBUG nova.compute.provider_tree [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.309315] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 99aab9ed9c314fbfb2602e1a05c0b76b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.320699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99aab9ed9c314fbfb2602e1a05c0b76b [ 628.321828] env[61594]: DEBUG nova.scheduler.client.report [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 628.324113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 35a5dd00144b4499b1d85a0f5b01f34f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.341025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35a5dd00144b4499b1d85a0f5b01f34f [ 628.341025] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.254s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 628.341025] env[61594]: ERROR nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. 
[ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Traceback (most recent call last): [ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.driver.spawn(context, instance, image_meta, [ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.341025] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] vm_ref = self.build_virtual_machine(instance, [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] for vif in network_info: [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self._sync_wrapper(fn, *args, **kwargs) [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.wait() [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self[:] = self._gt.wait() [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 628.341730] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self._exit_event.wait() [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] result = hub.switch() [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return self.greenlet.switch() [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] result = function(*args, **kwargs) [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] return func(*args, **kwargs) [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise e [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] nwinfo = self.network_api.allocate_for_instance( [ 628.342198] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] created_port_ids = self._update_ports_for_instance( [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] with excutils.save_and_reraise_exception(): [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] self.force_reraise() [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise self.value [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] updated_port = self._update_port( [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] _ensure_no_port_binding_failure(port) [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 628.342556] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] raise exception.PortBindingFailed(port_id=port['id']) [ 628.342885] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] nova.exception.PortBindingFailed: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. [ 628.342885] env[61594]: ERROR nova.compute.manager [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] [ 628.342885] env[61594]: DEBUG nova.compute.utils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 628.344575] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Build of instance 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd was re-scheduled: Binding failed for port 55260f69-7e9b-4733-95ed-eef39c8b95bf, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 628.346309] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 628.346309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.346309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.346309] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.346819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 3196e9ad73684ff08087047d6577c56b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.356719] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3196e9ad73684ff08087047d6577c56b [ 628.515513] env[61594]: DEBUG nova.network.neutron [None 
req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.836253] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.836253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 5051d77141814cd0ba2b85008efcd627 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.849232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5051d77141814cd0ba2b85008efcd627 [ 628.849232] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 628.849232] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 628.849232] env[61594]: DEBUG nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 628.849232] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 628.887224] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.887969] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 57c425cc62804ad9a98c258e3db6f0ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.899101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57c425cc62804ad9a98c258e3db6f0ff [ 628.899101] env[61594]: DEBUG nova.network.neutron [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.899101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 86217375ec1a47f48a390350de4985c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.909797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86217375ec1a47f48a390350de4985c9 [ 628.910643] env[61594]: INFO nova.compute.manager [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd] Took 0.06 seconds to deallocate network for instance. [ 628.913938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 358fdf934c6d4049a6d47b349ea42be6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 628.962734] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 358fdf934c6d4049a6d47b349ea42be6 [ 628.965764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 214efeca62914547aa950dc99fd1bf94 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.002674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 214efeca62914547aa950dc99fd1bf94 [ 629.039645] env[61594]: INFO nova.scheduler.client.report [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Deleted allocations for instance 73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd [ 629.045962] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 23f1e30935fb4529a8fd3016f5312505 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.069664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23f1e30935fb4529a8fd3016f5312505 [ 629.070280] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ce5b7126-d3f0-4fe5-bc44-ac1395dd3fe3 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock 
"73f9bdec-1446-4293-a1d9-cfa4ef2dd9dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.533s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.246262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "8e95b690-9fa8-4a15-901f-1bf318bfff10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.246728] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "8e95b690-9fa8-4a15-901f-1bf318bfff10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.247646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 6e8e89f37fcb468eb02270c171469fd9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.270982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e8e89f37fcb468eb02270c171469fd9 [ 629.271898] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 629.273571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 179d54e360b9404eb5ea93d5e19b093f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.317941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 179d54e360b9404eb5ea93d5e19b093f [ 629.354262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.354521] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.356171] env[61594]: INFO nova.compute.claims [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.357608] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg d237e361d2ef4100b97bd07bee7c704b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.414256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d237e361d2ef4100b97bd07bee7c704b [ 629.418158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg cfa862488ae84994823b51d0381dafc2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.444204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfa862488ae84994823b51d0381dafc2 [ 629.521422] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0658756-7942-48bd-9802-9f0e27255b39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.533417] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0bfb55-b290-4306-831c-6074d287d6ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.570663] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c56608f1-d41e-46c1-a7bb-92f79c229b6a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.577331] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a7184d1a-f369-408c-9d82-9789958dd135 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.597041] env[61594]: DEBUG nova.compute.provider_tree [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.600617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 855e539eee5b439fb3f2ce3059441046 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.614857] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 855e539eee5b439fb3f2ce3059441046 [ 629.619427] env[61594]: DEBUG nova.scheduler.client.report [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 629.621462] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 5529e3aab67f406f89bc61d7d1f1bc68 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.645926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5529e3aab67f406f89bc61d7d1f1bc68 [ 629.647208] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.292s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.647582] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 629.649117] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 9076700c8b094e318f7a9575f4b2c2a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.712444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9076700c8b094e318f7a9575f4b2c2a9 [ 629.712444] env[61594]: DEBUG nova.compute.utils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 629.712813] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 95d60e3beb5f4f86926c97b9e32e352c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.717263] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 629.717596] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 629.739959] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95d60e3beb5f4f86926c97b9e32e352c [ 629.740732] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 629.742394] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 824d46ae42d947e28f4a0a55292c51b6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.799749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 824d46ae42d947e28f4a0a55292c51b6 [ 629.804381] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg ac37d172b70c4e2cbb5739b1416c0c28 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 629.847975] env[61594]: DEBUG nova.policy [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34cf0f0eff664be3be9da8676c5bd742', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8d235e06cce044529d4f982bf37af3e8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 629.854467] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac37d172b70c4e2cbb5739b1416c0c28 [ 629.854467] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 629.898320] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=192,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 629.899575] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 629.899575] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 629.899575] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 629.899575] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 629.899575] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 629.899784] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 629.899784] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 629.900180] env[61594]: DEBUG 
nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 629.900180] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 629.900274] env[61594]: DEBUG nova.virt.hardware [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.901342] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c946aa1-85a4-45fc-b92d-c18781b669d1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.917051] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67c174d-2700-4aa1-a150-92300128a020 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.472036] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Successfully created port: 3ee45c30-d84c-487e-b9b1-efc497508ce9 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 632.253786] env[61594]: ERROR nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. 
[ 632.253786] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 632.253786] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.253786] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.253786] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.253786] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.253786] env[61594]: ERROR nova.compute.manager raise self.value [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.253786] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 632.253786] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.253786] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 632.254399] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.254399] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 632.254399] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. 
[ 632.254399] env[61594]: ERROR nova.compute.manager [ 632.254399] env[61594]: Traceback (most recent call last): [ 632.254399] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 632.254399] env[61594]: listener.cb(fileno) [ 632.254399] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 632.254399] env[61594]: result = function(*args, **kwargs) [ 632.254399] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.254399] env[61594]: return func(*args, **kwargs) [ 632.254399] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 632.254399] env[61594]: raise e [ 632.254399] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 632.254399] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 632.254399] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.254399] env[61594]: created_port_ids = self._update_ports_for_instance( [ 632.254399] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.254399] env[61594]: with excutils.save_and_reraise_exception(): [ 632.254399] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.254399] env[61594]: self.force_reraise() [ 632.254399] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.254399] env[61594]: raise self.value [ 632.254399] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.254399] env[61594]: updated_port = self._update_port( [ 632.254399] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.254399] env[61594]: _ensure_no_port_binding_failure(port) [ 632.254399] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.254399] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 632.255080] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. [ 632.255080] env[61594]: Removing descriptor: 17 [ 632.255080] env[61594]: ERROR nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. 
[ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Traceback (most recent call last): [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] yield resources [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.driver.spawn(context, instance, image_meta, [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.255080] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] vm_ref = self.build_virtual_machine(instance, [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] for vif in network_info: [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self._sync_wrapper(fn, *args, **kwargs) [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.wait() [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self[:] = self._gt.wait() [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self._exit_event.wait() [ 632.255391] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.255748] env[61594]: ERROR 
nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] result = hub.switch() [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self.greenlet.switch() [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] result = function(*args, **kwargs) [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return func(*args, **kwargs) [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise e [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] nwinfo = self.network_api.allocate_for_instance( [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.255748] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] created_port_ids = self._update_ports_for_instance( [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] with excutils.save_and_reraise_exception(): [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.force_reraise() [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise self.value [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] updated_port = self._update_port( [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.256087] 
env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] _ensure_no_port_binding_failure(port) [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.256087] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise exception.PortBindingFailed(port_id=port['id']) [ 632.256383] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. [ 632.256383] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] [ 632.256383] env[61594]: INFO nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Terminating instance [ 632.262999] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.262999] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.262999] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.262999] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg c75e54a52d134484afa367f20a19a326 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.273707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c75e54a52d134484afa367f20a19a326 [ 632.296055] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.424025] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.424491] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 7c0d7b4e084d4cb69402f06f9ed3bef6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.437567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c0d7b4e084d4cb69402f06f9ed3bef6 [ 632.438240] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.438654] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 632.438818] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 632.439385] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-763f8810-5aa5-439f-acdc-fbe8b39d1b7c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.451746] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327ee7d9-e4ad-46f7-beda-257a1612a287 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.481820] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e95b690-9fa8-4a15-901f-1bf318bfff10 could not be found. 
[ 632.482131] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 632.482327] env[61594]: INFO nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Took 0.04 seconds to destroy the instance on the hypervisor. [ 632.482762] env[61594]: DEBUG oslo.service.loopingcall [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 632.483222] env[61594]: DEBUG nova.compute.manager [-] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 632.483312] env[61594]: DEBUG nova.network.neutron [-] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 632.504574] env[61594]: DEBUG nova.network.neutron [-] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.505254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5c5f01f3af6048d78e0810e5f91de314 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.515266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c5f01f3af6048d78e0810e5f91de314 [ 632.516275] env[61594]: DEBUG nova.network.neutron [-] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.516275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b4f303c4efb14aeb9a2056773750bea0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.526873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4f303c4efb14aeb9a2056773750bea0 [ 632.527588] env[61594]: INFO nova.compute.manager [-] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Took 0.04 seconds to deallocate network for instance. 
[ 632.534154] env[61594]: DEBUG nova.compute.claims [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 632.534154] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.534154] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.535664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg c6bd134e15a64b36866422745e6f1ae7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.583473] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6bd134e15a64b36866422745e6f1ae7 [ 632.672383] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c4e569-03e2-4314-af79-5f04f3f61ba0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.682769] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0096c87e-96a1-4243-b6ef-1a60e89d8247 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.722940] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fabcf06-d7e9-4caf-a53d-ff03c21cafb6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.733632] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdde7eeb-fa4f-4a28-b5e1-caf7fde75e28 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.748867] env[61594]: DEBUG nova.compute.provider_tree [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 632.748867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 224ea4d772f14f00ae3aaabd15df0bb8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.757989] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
224ea4d772f14f00ae3aaabd15df0bb8 [ 632.759035] env[61594]: DEBUG nova.scheduler.client.report [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 632.762602] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg b35a3a5d53614b8085aa64acf525e0b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.785159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b35a3a5d53614b8085aa64acf525e0b4 [ 632.786782] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 632.787492] env[61594]: ERROR nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. 
[ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Traceback (most recent call last): [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.driver.spawn(context, instance, image_meta, [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] vm_ref = self.build_virtual_machine(instance, [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.787492] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] for vif in network_info: [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self._sync_wrapper(fn, *args, **kwargs) [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.wait() [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self[:] = self._gt.wait() [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self._exit_event.wait() [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] result = hub.switch() [ 632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
632.787850] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return self.greenlet.switch() [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] result = function(*args, **kwargs) [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] return func(*args, **kwargs) [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise e [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] nwinfo = self.network_api.allocate_for_instance( [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] created_port_ids = self._update_ports_for_instance( [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] with excutils.save_and_reraise_exception(): [ 632.788175] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] self.force_reraise() [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise self.value [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] updated_port = self._update_port( [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] _ensure_no_port_binding_failure(port) [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] raise exception.PortBindingFailed(port_id=port['id']) [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] nova.exception.PortBindingFailed: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. [ 632.788520] env[61594]: ERROR nova.compute.manager [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] [ 632.789887] env[61594]: DEBUG nova.compute.utils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 632.791103] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Build of instance 8e95b690-9fa8-4a15-901f-1bf318bfff10 was re-scheduled: Binding failed for port 3ee45c30-d84c-487e-b9b1-efc497508ce9, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 632.791103] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 632.791226] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquiring lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.792027] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Acquired lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.792027] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.792027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg cce6e62a97074524832030b7c4eb8080 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.803243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cce6e62a97074524832030b7c4eb8080 [ 632.843587] env[61594]: DEBUG nova.network.neutron [None 
req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.913812] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "63bccd39-d951-4187-872d-559cd7fead30" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.913812] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "63bccd39-d951-4187-872d-559cd7fead30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.913812] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 6d2112eb57e84defa612d57e38e45ede in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.927806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d2112eb57e84defa612d57e38e45ede [ 632.928451] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 632.930327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 4a8a62d312a74ad086399b059f151d2b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 632.967023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a8a62d312a74ad086399b059f151d2b [ 632.998292] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.998292] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 632.998292] env[61594]: INFO nova.compute.claims [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 633.001827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 6221b3fee0f64fff9acbd696b75eb41a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.050061] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6221b3fee0f64fff9acbd696b75eb41a [ 633.052410] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg fefc702d084943f9803a26d0b0869dcf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.067615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fefc702d084943f9803a26d0b0869dcf [ 633.092904] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.093502] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg d311069b9bde4ed1b01b349cb4e66e38 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.108479] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d311069b9bde4ed1b01b349cb4e66e38 [ 633.109131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 
tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Releasing lock "refresh_cache-8e95b690-9fa8-4a15-901f-1bf318bfff10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 633.110023] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 633.110970] env[61594]: DEBUG nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 633.111264] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 633.165994] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.165994] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 79d74a66ec414e90be954a8d273448f0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.179344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79d74a66ec414e90be954a8d273448f0 [ 633.184926] env[61594]: DEBUG nova.network.neutron [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] [instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.184926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 5ba12dd7eac6484cbae2ac998d54df45 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.189511] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fddd6d5-639e-48ce-80ce-926f6de5e430 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.197177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba12dd7eac6484cbae2ac998d54df45 [ 633.199227] env[61594]: INFO nova.compute.manager [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] 
[instance: 8e95b690-9fa8-4a15-901f-1bf318bfff10] Took 0.09 seconds to deallocate network for instance. [ 633.201764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 890dcef41075442bb8c15c6aeb76d7d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.213496] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfd1928-b262-4440-8a2b-e0fc2830de74 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.254723] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47be59e3-2d2b-4596-b974-677ffea20064 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.263907] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820440bd-f3d6-45a6-a908-842a6de95e9c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.283805] env[61594]: DEBUG nova.compute.provider_tree [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 633.284314] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 64f8a40b7efd4e97bf62ccb633b0d9b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.285540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 890dcef41075442bb8c15c6aeb76d7d2 [ 633.288456] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg ac8b36604c90442883aa7a2681fc84e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.294181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64f8a40b7efd4e97bf62ccb633b0d9b1 [ 633.295111] env[61594]: DEBUG nova.scheduler.client.report [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 633.297512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 74f83d617f83436bbc8f693fd6313c12 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.324033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74f83d617f83436bbc8f693fd6313c12 [ 633.325068] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.329s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.326609] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 633.327359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 9cf57dff6efe4ebd9ee5f1e9132776d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.352843] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac8b36604c90442883aa7a2681fc84e6 [ 633.381021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cf57dff6efe4ebd9ee5f1e9132776d3 [ 633.381021] env[61594]: DEBUG nova.compute.utils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 633.381021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 9ad65a33eff540b0962846333198c2c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.382828] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Allocating IP information in the background. 
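
The oslo_concurrency.lockutils records above show every claim on this host funneling through the "compute_resources" lock: ResourceTracker.instance_claim acquires it, records the claim against node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28, and releases it a few hundred milliseconds later. A minimal sketch of that locking pattern, assuming oslo.concurrency and with the claim body reduced to a placeholder (illustrative only, not Nova's actual resource-tracker code):

    # Sketch of the lock pattern seen in the "compute_resources" records above.
    # Assumes oslo.concurrency is installed; the claim body is a placeholder,
    # not Nova's ResourceTracker logic.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, node):
        # Runs with the "compute_resources" lock held, so concurrent builds on
        # this host cannot double-count CPU, RAM or disk for the node.
        return {'instance': instance_uuid, 'node': node}

    def instance_claim_ctx(instance_uuid, node):
        # Equivalent context-manager form; the log's "acquired"/"released"
        # lines bracket exactly this window.
        with lockutils.lock('compute_resources'):
            return {'instance': instance_uuid, 'node': node}
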
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 633.383413] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 633.389265] env[61594]: INFO nova.scheduler.client.report [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Deleted allocations for instance 8e95b690-9fa8-4a15-901f-1bf318bfff10 [ 633.401398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Expecting reply to msg 7981eb272c594f8182b464aa700e74ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.403972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ad65a33eff540b0962846333198c2c3 [ 633.404815] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 633.407972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 70019644760f4e4aa6be42c32f1315dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.424210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7981eb272c594f8182b464aa700e74ae [ 633.424941] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1b45b4e9-2700-41dc-a982-68d4e2d91040 tempest-ListServerFiltersTestJSON-89530523 tempest-ListServerFiltersTestJSON-89530523-project-member] Lock "8e95b690-9fa8-4a15-901f-1bf318bfff10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 4.178s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 633.461950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70019644760f4e4aa6be42c32f1315dc [ 633.463603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 21d18cb701c640399c264951988f1ad0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 633.504036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21d18cb701c640399c264951988f1ad0 [ 633.505943] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 633.533073] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 633.533335] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 633.533573] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 633.534366] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 633.534366] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 633.534366] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 633.534366] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 633.534366] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 633.534546] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 
tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 633.534626] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 633.534791] env[61594]: DEBUG nova.virt.hardware [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 633.535699] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9478f778-c63d-4399-ac10-f5f38a142fb5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.539793] env[61594]: DEBUG nova.policy [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39dd18ce542f42e08215016404ffe9dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3952a0eb9b246b3981a76df98b855f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 633.546509] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a871a85-2778-4d0b-93be-6d057be54ed3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.539444] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "98916e4a-561b-4c17-9903-de88c3678f13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.539444] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "98916e4a-561b-4c17-9903-de88c3678f13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.539444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg c55a6df9acac49649a5e60a647adfe69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.552406] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg c55a6df9acac49649a5e60a647adfe69 [ 634.552576] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 634.554486] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 0ea3fba03361481a9858807fd7c4be08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.591653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ea3fba03361481a9858807fd7c4be08 [ 634.599527] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Successfully created port: 855ab017-300e-4d43-a4cc-2423f56b7a11 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.622789] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 634.623067] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.624675] env[61594]: INFO nova.compute.claims [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 634.627740] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 9770fd8ce89041088bb283e437e9a061 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.672805] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9770fd8ce89041088bb283e437e9a061 [ 634.674701] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 215f9a5e0c284e42a792e3c77aa5a6d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.686323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 215f9a5e0c284e42a792e3c77aa5a6d7 [ 634.781372] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2261bdcd-75a9-42a3-8d9f-0a4bdd146706 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.791656] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814bb1ba-845e-44ad-8702-8ef36ce89529 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.834843] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edda71eb-e113-4187-a7f0-19549ba65f3f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.842943] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa09462-2a98-42df-bcef-cfb4e36953c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.856628] env[61594]: DEBUG nova.compute.provider_tree [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.857108] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg f80b6ab80a9a4c42ac902132176f5062 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.865374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f80b6ab80a9a4c42ac902132176f5062 [ 634.866546] env[61594]: DEBUG nova.scheduler.client.report [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 634.869251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 73d784af68fe46e09b484ada9b4ac54d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.886997] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73d784af68fe46e09b484ada9b4ac54d [ 634.887879] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.265s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.888194] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 
tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 634.891114] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg b48bb523d99644a595c192de410f8b61 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.936624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b48bb523d99644a595c192de410f8b61 [ 634.937886] env[61594]: DEBUG nova.compute.utils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 634.938517] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 9aa70d2e2d664ddf8f188be044fea715 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.939693] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 634.939865] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 634.956223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9aa70d2e2d664ddf8f188be044fea715 [ 634.956634] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Start building block device mappings for instance. 
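
The nova.virt.hardware records that follow (and the matching block for the earlier instance above) walk from the flavor and image limits down to "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]". A simplified sketch of that enumeration, assuming only that a valid topology is any (sockets, cores, threads) combination within the limits whose product equals the flavor's vCPU count; the real code also handles thread policies, NUMA and preference sorting, which are omitted here:

    # Simplified sketch of the enumeration reflected in the "Possible
    # topologies" records below; nova.virt.hardware does considerably more.
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(Topology(sockets, cores, threads))
        return found

    # m1.nano has vcpus=1, so only one combination fits, matching
    # "Got 1 possible topologies" in the records around this point.
    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
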
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 634.958406] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 7c29ee6c06d748b78dfa2c520a8f32fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 634.994403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c29ee6c06d748b78dfa2c520a8f32fc [ 634.996302] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg d4b7c58744b74e3f8e431cb9e204c2b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 635.029807] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4b7c58744b74e3f8e431cb9e204c2b4 [ 635.031247] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 635.059419] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 635.059688] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 635.059845] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 635.060034] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 635.060221] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 
tempest-VolumesAdminNegativeTest-225605931-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 635.060410] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 635.060622] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 635.060778] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 635.060940] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 635.061161] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 635.061343] env[61594]: DEBUG nova.virt.hardware [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 635.062504] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff586b1-38b6-47ca-899a-d1bc4a296b9d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.071989] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6161cb-13ed-4aa0-950a-684f32805548 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.163489] env[61594]: DEBUG nova.policy [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a893b6f20a704d77968791ca48532894', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b410a9b0a0a54c00bbbaa088cd81b957', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 635.884220] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Successfully created port: 3c3a23b2-f923-4a4d-9143-40f152e37a59 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.172689] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquiring lock "0ce4707a-de75-438a-be72-d829478bfdff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.174617] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "0ce4707a-de75-438a-be72-d829478bfdff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.174617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 5946581c719e4ccab236f664afe7e52e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.190960] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5946581c719e4ccab236f664afe7e52e [ 636.191763] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 636.193842] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg aba96a72770e4b8ca7e6f17f697ee051 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.269149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba96a72770e4b8ca7e6f17f697ee051 [ 636.311152] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.312320] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 636.313529] env[61594]: INFO nova.compute.claims [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 636.315343] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 0007d41d173c461c8e0690aba9f72d09 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.376800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0007d41d173c461c8e0690aba9f72d09 [ 636.378509] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg ec5eaba7ac9d467db28af99ad4b81349 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.397374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec5eaba7ac9d467db28af99ad4b81349 [ 636.511951] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6171a343-ef97-44c4-ba11-7d3d8e52d145 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.521025] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21627322-b6fd-4561-94f9-23dd90031eb4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.560860] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a1acee-1ab7-4e7b-b7df-26354c5f96f6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.574205] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e293a7-536c-4495-a271-8a60e05b8e70 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.588868] env[61594]: DEBUG nova.compute.provider_tree [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.589615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 396cec1b63774656934bce27c74f6298 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.599609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 396cec1b63774656934bce27c74f6298 [ 636.600619] env[61594]: DEBUG nova.scheduler.client.report [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 636.603528] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 737c7fe323704c099af33c2e8b6a3587 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.625245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 737c7fe323704c099af33c2e8b6a3587 [ 636.626598] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 636.627253] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 636.629373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg e1a6e05516e94d73ad27e0a98cfe58ee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.682016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1a6e05516e94d73ad27e0a98cfe58ee [ 636.682016] env[61594]: DEBUG nova.compute.utils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 636.682016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg d89316c33cc04001b8a0cf880853fa89 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.682819] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 636.683172] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 636.701349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d89316c33cc04001b8a0cf880853fa89 [ 636.702314] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Start building block device mappings for instance. 
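
Just above, the build for this instance logs "Start building networks asynchronously", "Allocating IP information in the background", and then immediately moves on to "Start building block device mappings": Neutron port creation is deliberately overlapped with the rest of resource preparation, and the "Successfully created port" records land later. A rough sketch of that shape, using concurrent.futures purely for illustration (Nova itself spawns an eventlet green thread, and the helper names here are hypothetical stand-ins):

    # Illustration only of why port creation and block-device work interleave
    # in this log; allocate_ports / build_block_device_mappings are stand-ins.
    from concurrent.futures import ThreadPoolExecutor

    def allocate_ports(instance_uuid):
        # stand-in for the Neutron allocation that eventually produces the
        # "Successfully created port: <uuid>" records
        return ['<port-uuid>']

    def build_block_device_mappings(instance_uuid):
        # stand-in for the BDM work that proceeds while networking is pending
        return []

    def build_resources(instance_uuid):
        with ThreadPoolExecutor(max_workers=1) as pool:
            network = pool.submit(allocate_ports, instance_uuid)   # background
            bdms = build_block_device_mappings(instance_uuid)      # overlaps
            ports = network.result()   # join before spawning on the hypervisor
        return ports, bdms
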
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 636.704123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 7027db2525454a81a5239063d525a4a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.724742] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Successfully created port: 67790ac8-de66-4ed2-a07c-71ee46e65aa6 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.743105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7027db2525454a81a5239063d525a4a4 [ 636.747102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 93cbda29b1574e4f81a24510ea246e3d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 636.789197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93cbda29b1574e4f81a24510ea246e3d [ 636.790248] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 636.828244] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 636.828384] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 636.828640] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 636.828684] env[61594]: DEBUG 
nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 636.828823] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 636.828970] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 636.829485] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 636.829668] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 636.829846] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 636.830180] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 636.830248] env[61594]: DEBUG nova.virt.hardware [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 636.831488] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a44c2c46-f79e-4b26-b5a4-076d330239b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.840975] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc785f5-66af-4d80-8712-af7ed7ae546a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.962806] env[61594]: DEBUG nova.policy [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] 
Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c55f4f7f38974ff6a84cabbb31fbf5d1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09f40d4f7f5f4191ab268f6dc55509c5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 637.522249] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Successfully created port: 74ad1a0e-709d-4fc3-8965-c891a88b81f5 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.678162] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "00e11309-0f82-49d5-b4f5-02e2bdb517e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.679152] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "00e11309-0f82-49d5-b4f5-02e2bdb517e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.679728] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg d5b2d6d08df54ca28381705a98650fcf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 637.690009] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5b2d6d08df54ca28381705a98650fcf [ 637.690487] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 637.692272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg b167b246fa0d457eb2db83adf2b9efba in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 637.734277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b167b246fa0d457eb2db83adf2b9efba [ 637.762614] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 637.764026] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.764544] env[61594]: INFO nova.compute.claims [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 637.766206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg b88f24c62b4746be8103e58e388a4973 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 637.810907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b88f24c62b4746be8103e58e388a4973 [ 637.812982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg ab3468b8302f4092ad86054fac855666 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 637.829531] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab3468b8302f4092ad86054fac855666 [ 637.932101] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580de736-ef44-4f3d-9896-b26ee9127f93 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.940800] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29204757-4752-482e-91a0-8fc9837d3010 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.976179] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2098f87e-0069-4e1b-a33f-65330594a389 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.984026] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-b8067256-7c1a-4957-b3c0-f295037a3ad5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.998304] env[61594]: DEBUG nova.compute.provider_tree [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 637.999238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 6d1de085c3c74a6fa8cf1ff41e8aab62 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.014153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d1de085c3c74a6fa8cf1ff41e8aab62 [ 638.014153] env[61594]: DEBUG nova.scheduler.client.report [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 638.015576] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 1b13df1ac10e44a7bfbb4c3a6a1583ab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.029849] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b13df1ac10e44a7bfbb4c3a6a1583ab [ 638.030875] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.268s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.031469] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 638.033295] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 7a492e3d40f64046807951f1bdd0016d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.078348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a492e3d40f64046807951f1bdd0016d [ 638.080376] env[61594]: DEBUG nova.compute.utils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 638.080376] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg b9e4054f294d4bfd949b9736807cf728 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.080704] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 638.082982] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.108722] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9e4054f294d4bfd949b9736807cf728 [ 638.109546] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 638.111307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg b045a042791b4a3b96c7f235c58b20ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.162786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b045a042791b4a3b96c7f235c58b20ac [ 638.166591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg ba1dc58ddefd4f82874626efc55df098 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 638.212815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba1dc58ddefd4f82874626efc55df098 [ 638.214062] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 638.256740] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 638.256904] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 638.257087] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 638.257294] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 638.257408] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 
tempest-DeleteServersAdminTestJSON-829365191-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 638.257527] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 638.257731] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 638.257909] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 638.258071] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 638.258254] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 638.258428] env[61594]: DEBUG nova.virt.hardware [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 638.259415] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a7aa0b4-5ac7-4570-adbf-c02817c7abcf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.271259] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c3ba6e-c233-4f71-8194-f173241b9191 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.457206] env[61594]: DEBUG nova.policy [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26298f53a4904c86a795c9a1717500e7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a36fff95014d4d868f65dafefe1958fe', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 639.000920] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Successfully created port: 04789809-11c8-496c-ad05-374e7e031000 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.940966] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Successfully created port: 2c49d0c6-a457-48eb-971f-541920c2e4f2 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 642.511464] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquiring lock "1385849e-9e64-4062-b6aa-300e6e7eab3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.511464] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "1385849e-9e64-4062-b6aa-300e6e7eab3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.511464] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 47c4357d5cde4818a15497afc846ea9d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.528393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47c4357d5cde4818a15497afc846ea9d [ 642.529180] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 642.531016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg e84dd66a789f45f795a42fb8dea89d66 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.585706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e84dd66a789f45f795a42fb8dea89d66 [ 642.609228] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 642.609510] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.611156] env[61594]: INFO nova.compute.claims [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.612787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 3ed885b5544d40488df2468b2a1478ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.628972] env[61594]: ERROR nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. 
[ 642.628972] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 642.628972] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.628972] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.628972] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.628972] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.628972] env[61594]: ERROR nova.compute.manager raise self.value [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.628972] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 642.628972] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.628972] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 642.630064] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.630064] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 642.630064] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. 
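Annotation: the PortBindingFailed tracebacks in this log all funnel through _ensure_no_port_binding_failure() in nova/network/neutron.py (line 294 in this tree), as the frames above show. Below is a minimal, self-contained sketch of that check for readers following the failure path; the trigger condition (Neutron reporting binding:vif_type of 'binding_failed' on the created port) is an assumption based on common Neutron port-binding semantics, and the exception class is a stand-in for nova.exception.PortBindingFailed rather than Nova's actual implementation.

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed; message text mirrors the log.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Assumed check: Neutron marks a port it could not bind by setting
    # binding:vif_type to 'binding_failed'; Nova converts that into
    # PortBindingFailed, which aborts the spawn and leads to the
    # "Terminating instance" / network deallocation records seen later in this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


if __name__ == '__main__':
    # Reproduces the error text logged for port 67790ac8-... above.
    try:
        _ensure_no_port_binding_failure({
            'id': '67790ac8-de66-4ed2-a07c-71ee46e65aa6',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(f"ERROR nova.compute.manager {exc}")

End of annotation; the second copy of the traceback below is the eventlet greenthread's view of the same exception.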
[ 642.630064] env[61594]: ERROR nova.compute.manager [ 642.630064] env[61594]: Traceback (most recent call last): [ 642.630064] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 642.630064] env[61594]: listener.cb(fileno) [ 642.630064] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 642.630064] env[61594]: result = function(*args, **kwargs) [ 642.630064] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.630064] env[61594]: return func(*args, **kwargs) [ 642.630064] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 642.630064] env[61594]: raise e [ 642.630064] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 642.630064] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 642.630064] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.630064] env[61594]: created_port_ids = self._update_ports_for_instance( [ 642.630064] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.630064] env[61594]: with excutils.save_and_reraise_exception(): [ 642.630064] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.630064] env[61594]: self.force_reraise() [ 642.630064] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.630064] env[61594]: raise self.value [ 642.630064] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.630064] env[61594]: updated_port = self._update_port( [ 642.630064] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.630064] env[61594]: _ensure_no_port_binding_failure(port) [ 642.630064] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.630064] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 642.630786] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. [ 642.630786] env[61594]: Removing descriptor: 19 [ 642.630786] env[61594]: ERROR nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. 
[ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Traceback (most recent call last): [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] yield resources [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.driver.spawn(context, instance, image_meta, [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.630786] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] vm_ref = self.build_virtual_machine(instance, [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] for vif in network_info: [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return self._sync_wrapper(fn, *args, **kwargs) [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.wait() [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self[:] = self._gt.wait() [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return self._exit_event.wait() [ 642.631175] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 642.632022] env[61594]: ERROR 
nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] result = hub.switch() [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return self.greenlet.switch() [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] result = function(*args, **kwargs) [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return func(*args, **kwargs) [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise e [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] nwinfo = self.network_api.allocate_for_instance( [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.632022] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] created_port_ids = self._update_ports_for_instance( [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] with excutils.save_and_reraise_exception(): [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.force_reraise() [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise self.value [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] updated_port = self._update_port( [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.632382] 
env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] _ensure_no_port_binding_failure(port) [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.632382] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise exception.PortBindingFailed(port_id=port['id']) [ 642.632689] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. [ 642.632689] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] [ 642.632689] env[61594]: INFO nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Terminating instance [ 642.632689] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.632689] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquired lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.632689] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.633730] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 5aee10ff4da34bf291702c67de6fe962 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.642688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aee10ff4da34bf291702c67de6fe962 [ 642.649914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ed885b5544d40488df2468b2a1478ef [ 642.651702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 21f7f43c4dae4fc791828fa0f6e2c323 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.662435] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21f7f43c4dae4fc791828fa0f6e2c323 [ 642.693029] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance cache missing 
network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 642.808517] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1cce54-7206-4b51-9c07-219f25fe84f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.825391] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27078e14-908b-423e-b715-d611f9eb252a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.864540] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5417b83c-6345-47e8-9c2c-7819c5a17a6e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.876781] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e83f6cd-a524-404b-8619-d5df6370d841 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.895231] env[61594]: DEBUG nova.compute.provider_tree [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 642.895759] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 18488894b1b040f59df46a9982429c95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.908961] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18488894b1b040f59df46a9982429c95 [ 642.909561] env[61594]: DEBUG nova.scheduler.client.report [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 642.913983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 4405ba69508c4f13b16f5b9ae2550cef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.935528] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4405ba69508c4f13b16f5b9ae2550cef [ 642.936442] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.936894] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 642.939043] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg d2a856368ae741f0b1c774ea3b6d88c1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.977697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2a856368ae741f0b1c774ea3b6d88c1 [ 642.978969] env[61594]: DEBUG nova.compute.utils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 642.979667] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 04c0ccc83a584c76badb21d82ad9d9f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 642.981412] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 642.981640] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 642.997357] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04c0ccc83a584c76badb21d82ad9d9f2 [ 642.997357] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 643.000323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg a4bc8ca1c42240aeb0741656e69195f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.040337] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4bc8ca1c42240aeb0741656e69195f2 [ 643.043309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 45a8a17d7c034276a0e432940b62c639 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.063386] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.063926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 0981ddd6df8d426abc4ee67bfc268c10 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.073751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0981ddd6df8d426abc4ee67bfc268c10 [ 643.074370] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Releasing lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.074728] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 643.074918] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 643.075482] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da87a65c-4318-4325-a1f9-8cb1a36af028 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.088415] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c73e0edf-62e3-46a6-8540-c9c649290ad3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.100281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 45a8a17d7c034276a0e432940b62c639 [ 643.100797] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 643.115633] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98916e4a-561b-4c17-9903-de88c3678f13 could not be found. [ 643.117039] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 643.117039] env[61594]: INFO nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Took 0.04 seconds to destroy the instance on the hypervisor. [ 643.119190] env[61594]: DEBUG oslo.service.loopingcall [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.122175] env[61594]: DEBUG nova.policy [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fec462edbe9546b587ce798c89d1cd3a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e2f2a89dde94e7ab986cfa80346c4e5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 643.122444] env[61594]: DEBUG nova.compute.manager [-] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 643.122444] env[61594]: DEBUG nova.network.neutron [-] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 643.139103] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 643.139246] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 643.139417] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.139610] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 643.139762] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Image pref 0:0:0 
{{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.139910] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 643.140155] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 643.140323] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 643.140509] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 643.140812] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 643.141371] env[61594]: DEBUG nova.virt.hardware [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 643.142120] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a2fb4d-3d6e-4728-be20-4b4510e93987 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.150670] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b8fab6-a163-4506-bbfb-edb9dcc73f5e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.164988] env[61594]: ERROR nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. 
[ 643.164988] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 643.164988] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.164988] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.164988] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.164988] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.164988] env[61594]: ERROR nova.compute.manager raise self.value [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.164988] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 643.164988] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.164988] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 643.165491] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.165491] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 643.165491] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. 
[ 643.165491] env[61594]: ERROR nova.compute.manager [ 643.165491] env[61594]: Traceback (most recent call last): [ 643.165491] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 643.165491] env[61594]: listener.cb(fileno) [ 643.165491] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 643.165491] env[61594]: result = function(*args, **kwargs) [ 643.165491] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.165491] env[61594]: return func(*args, **kwargs) [ 643.165491] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 643.165491] env[61594]: raise e [ 643.165491] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 643.165491] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 643.165491] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.165491] env[61594]: created_port_ids = self._update_ports_for_instance( [ 643.165491] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.165491] env[61594]: with excutils.save_and_reraise_exception(): [ 643.165491] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.165491] env[61594]: self.force_reraise() [ 643.165491] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.165491] env[61594]: raise self.value [ 643.165491] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.165491] env[61594]: updated_port = self._update_port( [ 643.165491] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.165491] env[61594]: _ensure_no_port_binding_failure(port) [ 643.165491] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.165491] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 643.166254] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. [ 643.166254] env[61594]: Removing descriptor: 17 [ 643.166254] env[61594]: ERROR nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. 
[ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] Traceback (most recent call last): [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] yield resources [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.driver.spawn(context, instance, image_meta, [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.166254] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] vm_ref = self.build_virtual_machine(instance, [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] for vif in network_info: [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self._sync_wrapper(fn, *args, **kwargs) [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.wait() [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self[:] = self._gt.wait() [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self._exit_event.wait() [ 643.166573] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.166977] env[61594]: ERROR 
nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] result = hub.switch() [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self.greenlet.switch() [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] result = function(*args, **kwargs) [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return func(*args, **kwargs) [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise e [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] nwinfo = self.network_api.allocate_for_instance( [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.166977] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] created_port_ids = self._update_ports_for_instance( [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] with excutils.save_and_reraise_exception(): [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.force_reraise() [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise self.value [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] updated_port = self._update_port( [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.167333] 
env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] _ensure_no_port_binding_failure(port) [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.167333] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise exception.PortBindingFailed(port_id=port['id']) [ 643.167651] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. [ 643.167651] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] [ 643.167651] env[61594]: INFO nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Terminating instance [ 643.169739] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.170134] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquired lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.170134] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.170490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 307f3ff3b39842ab83a4e3d6484d8182 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.174986] env[61594]: DEBUG nova.network.neutron [-] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.175323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ced7016bb70e4270b41435209f2ad794 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.180389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 307f3ff3b39842ab83a4e3d6484d8182 [ 643.189604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced7016bb70e4270b41435209f2ad794 [ 643.189604] env[61594]: DEBUG nova.network.neutron [-] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.189604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 09841d2d1d504bc79b7a160f31c56a0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.204642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 09841d2d1d504bc79b7a160f31c56a0c [ 643.205241] env[61594]: INFO nova.compute.manager [-] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Took 0.08 seconds to deallocate network for instance. [ 643.207639] env[61594]: DEBUG nova.compute.claims [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 643.207849] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 643.208044] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 643.209992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 5d9c216abf824ae8a856f0d527b727dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.270443] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d9c216abf824ae8a856f0d527b727dc [ 643.307634] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.420170] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5b0936-a1e3-47fe-9da6-a86042a9ea89 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.428418] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb128f04-7b8f-4af8-a749-673e71c6649f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.459519] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3f1f2c1-a2ea-40fa-b2bd-123dd3fcee0e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.469095] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c444522e-dd23-42c8-985c-84071052830e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.484137] env[61594]: DEBUG nova.compute.provider_tree [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.484507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg e7a1e28a4f0a4c28a9f0590e516ca1bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.492400] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7a1e28a4f0a4c28a9f0590e516ca1bf [ 643.493191] env[61594]: DEBUG nova.scheduler.client.report [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 643.495395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg d282828043e644debc2a672ed38d7cce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.509322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d282828043e644debc2a672ed38d7cce [ 643.510206] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.302s 
{{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 643.510822] env[61594]: ERROR nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Traceback (most recent call last): [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.driver.spawn(context, instance, image_meta, [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self._vmops.spawn(context, instance, image_meta, injected_files, [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] vm_ref = self.build_virtual_machine(instance, [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] vif_infos = vmwarevif.get_vif_info(self._session, [ 643.510822] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] for vif in network_info: [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return self._sync_wrapper(fn, *args, **kwargs) [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.wait() [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self[:] = self._gt.wait() [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return 
self._exit_event.wait() [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] result = hub.switch() [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 643.511505] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return self.greenlet.switch() [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] result = function(*args, **kwargs) [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] return func(*args, **kwargs) [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise e [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] nwinfo = self.network_api.allocate_for_instance( [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] created_port_ids = self._update_ports_for_instance( [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] with excutils.save_and_reraise_exception(): [ 643.512041] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] self.force_reraise() [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise self.value [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 
98916e4a-561b-4c17-9903-de88c3678f13] updated_port = self._update_port( [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] _ensure_no_port_binding_failure(port) [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] raise exception.PortBindingFailed(port_id=port['id']) [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] nova.exception.PortBindingFailed: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. [ 643.512507] env[61594]: ERROR nova.compute.manager [instance: 98916e4a-561b-4c17-9903-de88c3678f13] [ 643.512798] env[61594]: DEBUG nova.compute.utils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 643.513027] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Build of instance 98916e4a-561b-4c17-9903-de88c3678f13 was re-scheduled: Binding failed for port 67790ac8-de66-4ed2-a07c-71ee46e65aa6, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 643.513421] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 643.513641] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 643.513781] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquired lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 643.513935] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 643.514330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg cc4143a0ba4b43bc8afe6127c07d6dd9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.522248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc4143a0ba4b43bc8afe6127c07d6dd9 [ 643.730622] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.821094] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.821641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 8890e1bb3095414b8a2860eee8506d8e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 643.833370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8890e1bb3095414b8a2860eee8506d8e [ 643.834244] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Releasing lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.834425] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 643.834619] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 643.835152] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e31e0703-2eb5-43ef-be25-a2e84f8cf432 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.847112] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acdcb48a-626c-4ddc-8542-2273b0afb85f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.873104] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 63bccd39-d951-4187-872d-559cd7fead30 could not be found. 
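The WARNING above shows that the VM was never actually created on the vCenter backend, so the destroy path treats InstanceNotFound as "already gone" and carries on with cleanup (the next records report "Instance destroyed" and the network deallocation). A small sketch of that idempotent-teardown pattern, with hypothetical classes rather than the vmwareapi driver itself:

    # Idempotent destroy sketch: a missing backend VM is treated as already
    # destroyed so cleanup can continue. Hypothetical names for illustration.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        pass


    class FakeBackend:
        """Stand-in for a hypervisor session that knows no VMs."""
        def find_vm(self, uuid):
            raise InstanceNotFound("Instance %s could not be found." % uuid)

        def delete_vm(self, vm_ref):
            pass


    def destroy_instance(backend, uuid):
        try:
            vm_ref = backend.find_vm(uuid)
        except InstanceNotFound as exc:
            # Nothing to tear down on the hypervisor; warn and treat the
            # instance as destroyed so network/claim cleanup still runs.
            LOG.warning("Instance does not exist on backend: %s", exc)
            return
        backend.delete_vm(vm_ref)


    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO)
        destroy_instance(FakeBackend(), "63bccd39-d951-4187-872d-559cd7fead30")
        print("instance destroyed (or already gone)")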
[ 643.873350] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 643.873532] env[61594]: INFO nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Took 0.04 seconds to destroy the instance on the hypervisor. [ 643.873836] env[61594]: DEBUG oslo.service.loopingcall [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 643.874217] env[61594]: DEBUG nova.compute.manager [-] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 643.874346] env[61594]: DEBUG nova.network.neutron [-] [instance: 63bccd39-d951-4187-872d-559cd7fead30] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.121271] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.121829] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 07067de0a3d0457d8e86e4c318d90c38 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.136556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07067de0a3d0457d8e86e4c318d90c38 [ 644.136556] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Releasing lock "refresh_cache-98916e4a-561b-4c17-9903-de88c3678f13" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.136556] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 644.136672] env[61594]: DEBUG nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 644.136817] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.173866] env[61594]: DEBUG nova.network.neutron [-] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.218667] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.218667] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 9ce0de980c8544549196c5702ead9096 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.236460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ce0de980c8544549196c5702ead9096 [ 644.237159] env[61594]: DEBUG nova.network.neutron [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.237764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 617e156681724b34be519153caba68b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.260044] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 617e156681724b34be519153caba68b3 [ 644.260044] env[61594]: INFO nova.compute.manager [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 98916e4a-561b-4c17-9903-de88c3678f13] Took 0.12 seconds to deallocate network for instance. 
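The "Virt driver does not provide unplug_vifs method" line above shows the cleanup path probing the driver for an optional capability and then falling through to the Neutron deallocation regardless. A rough sketch of that probe-then-fall-through control flow, using hypothetical driver and network API objects:

    # Capability probe during cleanup: try the optional unplug_vifs hook, fall
    # through to network deallocation when the driver does not implement it.
    # Hypothetical classes; only the control flow mirrors the log above.
    import logging

    LOG = logging.getLogger(__name__)


    class MinimalDriver:
        """A driver that, like the vmwareapi driver here, has no unplug_vifs."""
        def unplug_vifs(self, instance, network_info):
            raise NotImplementedError()


    def cleanup_allocated_networks(driver, instance, network_info, network_api):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            LOG.debug("Virt driver does not provide unplug_vifs method, "
                      "skipping VIF unplug.")
        # Either way, release the Neutron ports held by the instance.
        network_api.deallocate_for_instance(instance)


    if __name__ == '__main__':
        class FakeNetworkAPI:
            def deallocate_for_instance(self, instance):
                print("deallocated network for", instance)

        logging.basicConfig(level=logging.DEBUG)
        cleanup_allocated_networks(MinimalDriver(),
                                   "98916e4a-561b-4c17-9903-de88c3678f13",
                                   [], FakeNetworkAPI())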
[ 644.261863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 30fac57985e64498b4254e228318ee79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.315834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30fac57985e64498b4254e228318ee79 [ 644.319811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 6e21e7e346104ca9ab47de93fe1a19bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.377313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e21e7e346104ca9ab47de93fe1a19bd [ 644.408269] env[61594]: INFO nova.scheduler.client.report [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Deleted allocations for instance 98916e4a-561b-4c17-9903-de88c3678f13 [ 644.422020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg afa355e3f8b0499bafb62b45445e0890 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.433311] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Successfully created port: 17510c18-0039-426c-a5b8-1f817ff57d52 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 644.441366] env[61594]: ERROR nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. 
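The "Deleted allocations for instance 98916e4a-…" line above is the scheduler report client releasing the resources Placement had reserved for the failed build. A hedged sketch of that call against the Placement HTTP API follows, assuming an endpoint and token are already at hand; DELETE /allocations/{consumer_uuid} is the documented route, everything else here (endpoint, token, microversion choice) is illustrative only.

    # Sketch: drop all Placement allocations held by one consumer (the
    # instance UUID). Assumes a Placement endpoint and a valid token;
    # variable names and error handling are illustrative only.
    import requests

    PLACEMENT_URL = "http://placement.example.test"   # hypothetical endpoint
    TOKEN = "gAAAAAB..."                               # hypothetical token


    def delete_allocations(consumer_uuid):
        resp = requests.delete(
            f"{PLACEMENT_URL}/allocations/{consumer_uuid}",
            headers={
                "X-Auth-Token": TOKEN,
                # Placement is microversioned; any recent value works for a
                # plain delete of a consumer's allocations.
                "OpenStack-API-Version": "placement 1.28",
            },
            timeout=10,
        )
        # 204 means the allocations were removed; 404 means there were none.
        if resp.status_code not in (204, 404):
            resp.raise_for_status()


    if __name__ == '__main__':
        delete_allocations("98916e4a-561b-4c17-9903-de88c3678f13")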
[ 644.441366] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.441366] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.441366] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.441366] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.441366] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.441366] env[61594]: ERROR nova.compute.manager raise self.value [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.441366] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 644.441366] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.441366] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 644.441828] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.441828] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 644.441828] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. 
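The oslo_utils.excutils frames in the traceback above show the save-and-reraise pattern: the in-flight exception is captured, cleanup code runs inside the with block, and the original exception is re-raised on exit. A stripped-down standalone version of that context manager (this is an illustration of the pattern, not the oslo_utils implementation):

    # Minimal save-and-reraise context manager, a standalone illustration of
    # the pattern used by oslo_utils.excutils in the traceback above.
    import sys


    class save_and_reraise_exception:
        """Run cleanup code for an in-flight exception, then re-raise it."""

        def __init__(self):
            self.value = None

        def __enter__(self):
            # Capture the exception currently being handled.
            self.value = sys.exc_info()[1]
            return self

        def __exit__(self, exc_type, exc_val, exc_tb):
            if exc_type is None and self.value is not None:
                # Cleanup finished cleanly: surface the original error.
                raise self.value
            # A new exception raised inside the block propagates on its own.
            return False


    def update_port(port_id):
        raise RuntimeError("binding failed for %s" % port_id)


    def update_ports(ports):
        for port_id in ports:
            try:
                update_port(port_id)
            except Exception:
                with save_and_reraise_exception():
                    print("cleaning up after failure on", port_id)


    if __name__ == '__main__':
        try:
            update_ports(["855ab017"])
        except RuntimeError as exc:
            print("re-raised:", exc)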
[ 644.441828] env[61594]: ERROR nova.compute.manager [ 644.441828] env[61594]: Traceback (most recent call last): [ 644.441828] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 644.441828] env[61594]: listener.cb(fileno) [ 644.441828] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 644.441828] env[61594]: result = function(*args, **kwargs) [ 644.441828] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 644.441828] env[61594]: return func(*args, **kwargs) [ 644.441828] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 644.441828] env[61594]: raise e [ 644.441828] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.441828] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 644.441828] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.441828] env[61594]: created_port_ids = self._update_ports_for_instance( [ 644.441828] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.441828] env[61594]: with excutils.save_and_reraise_exception(): [ 644.441828] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.441828] env[61594]: self.force_reraise() [ 644.441828] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.441828] env[61594]: raise self.value [ 644.441828] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.441828] env[61594]: updated_port = self._update_port( [ 644.441828] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.441828] env[61594]: _ensure_no_port_binding_failure(port) [ 644.441828] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.441828] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 644.442551] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. [ 644.442551] env[61594]: Removing descriptor: 20 [ 644.442551] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afa355e3f8b0499bafb62b45445e0890 [ 644.442890] env[61594]: ERROR nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. 
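The "failed network setup after 1 attempt(s)" error whose traceback ends above comes from an allocation loop that retries a configurable number of times and re-raises once the attempts are exhausted. A simplified sketch of such a retry wrapper, with hypothetical names; the real loop lives in nova.compute.manager._allocate_network_async.

    # Retry-limited allocation sketch: try the network allocation a fixed
    # number of times, log each failure, and re-raise after the last attempt.
    # Hypothetical names; mirrors only the control flow seen in the log.
    import logging
    import time

    LOG = logging.getLogger(__name__)


    def allocate_network_with_retries(allocate, retries=1, delay=1.0):
        attempts = max(1, retries)
        for attempt in range(1, attempts + 1):
            try:
                return allocate()
            except Exception:
                if attempt == attempts:
                    LOG.exception("Instance failed network setup after "
                                  "%d attempt(s)", attempt)
                    raise
                LOG.warning("Network allocation attempt %d/%d failed, "
                            "retrying", attempt, attempts)
                time.sleep(delay)


    if __name__ == '__main__':
        logging.basicConfig(level=logging.INFO)

        def always_fails():
            raise RuntimeError("Binding failed for port 04789809")

        try:
            allocate_network_with_retries(always_fails, retries=1, delay=0)
        except RuntimeError as exc:
            print("gave up:", exc)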
[ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Traceback (most recent call last): [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] yield resources [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.driver.spawn(context, instance, image_meta, [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] vm_ref = self.build_virtual_machine(instance, [ 644.442890] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] vif_infos = vmwarevif.get_vif_info(self._session, [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] for vif in network_info: [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self._sync_wrapper(fn, *args, **kwargs) [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.wait() [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self[:] = self._gt.wait() [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self._exit_event.wait() [ 644.443186] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 644.443186] env[61594]: ERROR 
nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] result = hub.switch() [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self.greenlet.switch() [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] result = function(*args, **kwargs) [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return func(*args, **kwargs) [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise e [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] nwinfo = self.network_api.allocate_for_instance( [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] created_port_ids = self._update_ports_for_instance( [ 644.443509] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] with excutils.save_and_reraise_exception(): [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.force_reraise() [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise self.value [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] updated_port = self._update_port( [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.443832] 
env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] _ensure_no_port_binding_failure(port) [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise exception.PortBindingFailed(port_id=port['id']) [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. [ 644.443832] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] [ 644.444194] env[61594]: INFO nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Terminating instance [ 644.445083] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquiring lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.445245] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquired lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.445410] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 644.447185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 026feea9f15f4a4083073e965f6611ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.447281] env[61594]: DEBUG oslo_concurrency.lockutils [None req-da7f62a9-cb31-4f18-896e-526b55621fa7 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "98916e4a-561b-4c17-9903-de88c3678f13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.911s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.459562] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 026feea9f15f4a4083073e965f6611ac [ 644.518977] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.728849] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.729424] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 9dbd19ad2e1f4929a7dce19eb1a37bfd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.738067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9dbd19ad2e1f4929a7dce19eb1a37bfd [ 644.738891] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Releasing lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 644.739294] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 644.739521] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 644.740088] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5f05f69d-b3b9-4f17-b75f-87cd5cc8614d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.751968] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b48b53-48db-438e-89d5-00d6ad7c235f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.779323] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ce4707a-de75-438a-be72-d829478bfdff could not be found. 
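Note how, in the instance tracebacks above, the PortBindingFailed only surfaces once the VMware driver iterates network_info: the object is a lazy wrapper that blocks on the background allocation greenthread when first consumed (the model.py __iter__/_sync_wrapper/wait frames). Below is a standalone sketch of that deferred-result idea, using a standard-library thread pool instead of eventlet; all names are hypothetical.

    # Deferred network-info sketch: allocation runs in the background and the
    # result (or its exception) is only delivered when the caller first
    # iterates the wrapper. Uses threading instead of eventlet.
    from concurrent.futures import ThreadPoolExecutor


    class AsyncNetworkInfo:
        def __init__(self, future):
            self._future = future
            self._resolved = None

        def _wait(self):
            if self._resolved is None:
                # Blocks until the background allocation finishes; if it
                # raised, the exception re-surfaces here, in the consumer.
                self._resolved = self._future.result()
            return self._resolved

        def __iter__(self):
            return iter(self._wait())


    def allocate_for_instance():
        raise RuntimeError("Binding failed for port 04789809")


    if __name__ == '__main__':
        with ThreadPoolExecutor(max_workers=1) as pool:
            network_info = AsyncNetworkInfo(pool.submit(allocate_for_instance))
            try:
                for vif in network_info:   # this is where the error appears
                    print(vif)
            except RuntimeError as exc:
                print("spawn fails here:", exc)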
[ 644.779583] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 644.779770] env[61594]: INFO nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Took 0.04 seconds to destroy the instance on the hypervisor. [ 644.780024] env[61594]: DEBUG oslo.service.loopingcall [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 644.780271] env[61594]: DEBUG nova.compute.manager [-] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 644.780315] env[61594]: DEBUG nova.network.neutron [-] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 644.816385] env[61594]: DEBUG nova.network.neutron [-] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.816919] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 00036065b7974e0bae2fb43f1cbc7975 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.834127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00036065b7974e0bae2fb43f1cbc7975 [ 644.834378] env[61594]: DEBUG nova.network.neutron [-] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.835013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2e88f5fda0e74202b3a846e4e2a6a1df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.847906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e88f5fda0e74202b3a846e4e2a6a1df [ 644.850989] env[61594]: INFO nova.compute.manager [-] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Took 0.07 seconds to deallocate network for instance. 
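The "Waiting for function ..._deallocate_network_with_retries to return" debug line above is an oslo.service looping call that keeps retrying the Neutron deallocation with a growing interval until it succeeds or gives up. A plain-Python sketch of that backoff loop, with hypothetical names and no oslo.service dependency:

    # Backoff retry sketch for network deallocation, mirroring the
    # looping-call behaviour referenced in the log. Standard library only.
    import time


    def deallocate_with_retries(deallocate, max_attempts=3, initial_delay=0.5):
        delay = initial_delay
        for attempt in range(1, max_attempts + 1):
            try:
                deallocate()
                return
            except Exception as exc:
                if attempt == max_attempts:
                    raise
                print(f"deallocation attempt {attempt} failed ({exc}), "
                      f"retrying in {delay}s")
                time.sleep(delay)
                delay *= 2          # exponential backoff between attempts


    if __name__ == '__main__':
        calls = {"n": 0}

        def flaky_deallocate():
            calls["n"] += 1
            if calls["n"] < 2:
                raise ConnectionError("neutron temporarily unreachable")

        deallocate_with_retries(flaky_deallocate, initial_delay=0.01)
        print("network deallocated after", calls["n"], "attempts")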
[ 644.850989] env[61594]: DEBUG nova.compute.claims [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 644.850989] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 644.851402] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.853225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg af9d53277690477ba5f16ccbddbf3026 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.861914] env[61594]: ERROR nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. 
[ 644.861914] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.861914] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.861914] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.861914] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.861914] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.861914] env[61594]: ERROR nova.compute.manager raise self.value [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.861914] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 644.861914] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.861914] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 644.862370] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.862370] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 644.862370] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. 
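Annotation: the traceback above ends in _ensure_no_port_binding_failure raising PortBindingFailed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2. A minimal sketch consistent with that traceback; the exact attribute the real helper inspects is an assumption (commonly the port's binding:vif_type reported back by Neutron):

    # Sketch of the check that produced the error above; only the raise site is
    # confirmed by the traceback, the binding:vif_type condition is assumed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # If Neutron returned the port with a failed binding, surface it so the
        # compute manager can abort the build and re-schedule the instance.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        _ensure_no_port_binding_failure(
            {'id': '2c49d0c6-a457-48eb-971f-541920c2e4f2',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message text as the ERROR records above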
[ 644.862370] env[61594]: ERROR nova.compute.manager [ 644.862370] env[61594]: Traceback (most recent call last): [ 644.862370] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 644.862370] env[61594]: listener.cb(fileno) [ 644.862370] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 644.862370] env[61594]: result = function(*args, **kwargs) [ 644.862370] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 644.862370] env[61594]: return func(*args, **kwargs) [ 644.862370] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 644.862370] env[61594]: raise e [ 644.862370] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.862370] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 644.862370] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.862370] env[61594]: created_port_ids = self._update_ports_for_instance( [ 644.862370] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.862370] env[61594]: with excutils.save_and_reraise_exception(): [ 644.862370] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.862370] env[61594]: self.force_reraise() [ 644.862370] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.862370] env[61594]: raise self.value [ 644.862370] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.862370] env[61594]: updated_port = self._update_port( [ 644.862370] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.862370] env[61594]: _ensure_no_port_binding_failure(port) [ 644.862370] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.862370] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 644.863193] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. [ 644.863193] env[61594]: Removing descriptor: 21 [ 644.863193] env[61594]: ERROR nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. 
[ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Traceback (most recent call last): [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] yield resources [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.driver.spawn(context, instance, image_meta, [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 644.863193] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] vm_ref = self.build_virtual_machine(instance, [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] vif_infos = vmwarevif.get_vif_info(self._session, [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] for vif in network_info: [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self._sync_wrapper(fn, *args, **kwargs) [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.wait() [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self[:] = self._gt.wait() [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self._exit_event.wait() [ 644.863524] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 644.863872] env[61594]: ERROR 
nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] result = hub.switch() [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self.greenlet.switch() [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] result = function(*args, **kwargs) [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return func(*args, **kwargs) [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise e [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] nwinfo = self.network_api.allocate_for_instance( [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 644.863872] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] created_port_ids = self._update_ports_for_instance( [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] with excutils.save_and_reraise_exception(): [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.force_reraise() [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise self.value [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] updated_port = self._update_port( [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 644.864268] 
env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] _ensure_no_port_binding_failure(port) [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 644.864268] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise exception.PortBindingFailed(port_id=port['id']) [ 644.864576] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. [ 644.864576] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] [ 644.864576] env[61594]: INFO nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Terminating instance [ 644.868492] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 644.868492] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquired lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.868492] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 644.868492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 823454d25e5147e88b75ea46c9d3cfe0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 644.875786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 823454d25e5147e88b75ea46c9d3cfe0 [ 644.906037] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 644.908417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af9d53277690477ba5f16ccbddbf3026 [ 645.033947] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0636c2-6778-44f9-b11f-f1a94b620a0a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.044089] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ae7045-0ccc-478c-87bf-4f0103ebc16d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.082640] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75ad911d-e55b-43e4-b2b9-57790f4d2032 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.091100] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67d3160c-e42a-4c7f-8d83-5046eb3d84ea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.105954] env[61594]: DEBUG nova.compute.provider_tree [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.106652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 56b08c01b7684bd9910a16208e1a4b4b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.117260] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56b08c01b7684bd9910a16208e1a4b4b [ 645.118376] env[61594]: DEBUG nova.scheduler.client.report [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 645.120827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 90a5961747cf4230bc60bfe9f8f6fdfe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.137144] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90a5961747cf4230bc60bfe9f8f6fdfe [ 645.137966] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 
tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.138605] env[61594]: ERROR nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Traceback (most recent call last): [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.driver.spawn(context, instance, image_meta, [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] vm_ref = self.build_virtual_machine(instance, [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.138605] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] for vif in network_info: [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self._sync_wrapper(fn, *args, **kwargs) [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.wait() [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self[:] = self._gt.wait() [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self._exit_event.wait() [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] result = hub.switch() [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.138940] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return self.greenlet.switch() [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] result = function(*args, **kwargs) [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] return func(*args, **kwargs) [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise e [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] nwinfo = self.network_api.allocate_for_instance( [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] created_port_ids = self._update_ports_for_instance( [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] with excutils.save_and_reraise_exception(): [ 645.139271] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] self.force_reraise() [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise self.value [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 
0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] updated_port = self._update_port( [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] _ensure_no_port_binding_failure(port) [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] raise exception.PortBindingFailed(port_id=port['id']) [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] nova.exception.PortBindingFailed: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. [ 645.139630] env[61594]: ERROR nova.compute.manager [instance: 0ce4707a-de75-438a-be72-d829478bfdff] [ 645.139902] env[61594]: DEBUG nova.compute.utils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.141129] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Build of instance 0ce4707a-de75-438a-be72-d829478bfdff was re-scheduled: Binding failed for port 04789809-11c8-496c-ad05-374e7e031000, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 645.141617] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 645.141919] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquiring lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.142140] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Acquired lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.142367] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.142805] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg b11b7810a31a474ea27a32970b3df2aa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.150785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b11b7810a31a474ea27a32970b3df2aa [ 645.189895] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.198139] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.199016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg d04c07f918b544518c330c59d970d8fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.209393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d04c07f918b544518c330c59d970d8fb [ 645.210282] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Releasing lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.211147] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 645.211517] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 645.212207] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5bdffeaf-744d-48d4-a649-e020f83a3e4d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.224581] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c5de81-acba-4017-aae3-928c05418d74 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.251359] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 00e11309-0f82-49d5-b4f5-02e2bdb517e8 could not be found. 
[ 645.251982] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 645.252185] env[61594]: INFO nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 645.252716] env[61594]: DEBUG oslo.service.loopingcall [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 645.253164] env[61594]: DEBUG nova.compute.manager [-] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 645.253419] env[61594]: DEBUG nova.network.neutron [-] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 645.285412] env[61594]: DEBUG nova.network.neutron [-] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.285950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a9807a9329bd45a5ab0b5c1ab445a260 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.300036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9807a9329bd45a5ab0b5c1ab445a260 [ 645.300596] env[61594]: DEBUG nova.network.neutron [-] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.301067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 717303b43b254497a0973fdda2002249 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.317330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 717303b43b254497a0973fdda2002249 [ 645.317330] env[61594]: INFO nova.compute.manager [-] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Took 0.06 seconds to deallocate network for instance. 
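Annotation: the deallocation above ("Instance cache missing network info", then "Updating instance_info_cache with network_info: []") is the case where no ports were ever bound, so there is nothing to delete on the Neutron side and the cache is simply rewritten as an empty list. A rough sketch under that assumption, with the cache lookup and Neutron client reduced to placeholders:

    # Placeholder sketch of deallocate_for_instance() when the network cache is
    # empty; a real implementation would call the Neutron API per port.
    def deallocate_for_instance(instance_uuid, cached_network_info):
        ports_to_delete = [vif['id'] for vif in (cached_network_info or [])]
        for port_id in ports_to_delete:
            pass  # would be neutron.delete_port(port_id) in the real path
        # Matches "Updating instance_info_cache with network_info: []"
        return []

    print(deallocate_for_instance("00e11309-0f82-49d5-b4f5-02e2bdb517e8", []))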
[ 645.317330] env[61594]: DEBUG nova.compute.claims [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 645.318041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.318041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.319954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 28c12f847cc546dd8938d6248f847ba5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.343275] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.343896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 7a913530b07649458b3616cf3143db18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.359869] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a913530b07649458b3616cf3143db18 [ 645.362923] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Releasing lock "refresh_cache-0ce4707a-de75-438a-be72-d829478bfdff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.362923] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 645.362923] env[61594]: DEBUG nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 645.362923] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 645.369881] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28c12f847cc546dd8938d6248f847ba5 [ 645.398599] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.399214] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg b19ba20622e84430844b7d0d0b140733 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.411319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b19ba20622e84430844b7d0d0b140733 [ 645.411892] env[61594]: DEBUG nova.network.neutron [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.412382] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 38b6a6b3637e4da699bb852918e53a60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.423609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38b6a6b3637e4da699bb852918e53a60 [ 645.424301] env[61594]: INFO nova.compute.manager [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] [instance: 0ce4707a-de75-438a-be72-d829478bfdff] Took 0.06 seconds to deallocate network for instance. 
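Annotation: taken together, the records for instance 0ce4707a-de75-438a-be72-d829478bfdff show the failure-handling order after PortBindingFailed: abort the resource claim, deallocate networking, delete the placement allocations, then re-schedule the build. A high-level sketch of that ordering only; the function names are placeholders for the steps the log records, not real Nova entry points:

    # Ordering sketch; each stub corresponds to one group of log records above.
    def abort_resource_claim(uuid):
        print(f"abort claim for {uuid}")        # Lock "compute_resources" / abort_instance_claim

    def deallocate_network(uuid):
        print(f"deallocate network for {uuid}") # deallocate_for_instance(), cache set to []

    def delete_placement_allocations(uuid):
        print(f"delete allocations for {uuid}") # "Deleted allocations for instance ..."

    def reschedule(uuid, reason):
        print(f"re-schedule {uuid}: {reason}")  # "Build of instance ... was re-scheduled"

    def handle_build_failure(uuid, reason):
        abort_resource_claim(uuid)
        deallocate_network(uuid)
        delete_placement_allocations(uuid)
        reschedule(uuid, reason)

    handle_build_failure(
        "0ce4707a-de75-438a-be72-d829478bfdff",
        "Binding failed for port 04789809-11c8-496c-ad05-374e7e031000")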
[ 645.426050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 22d89a94a6d440048c570031dde7174a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.476899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22d89a94a6d440048c570031dde7174a [ 645.479499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg da10702669d34ffcbbc4302ad200ad12 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.492186] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e553f81ae044453fbdc25b93dc7835b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.502319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e553f81ae044453fbdc25b93dc7835b7 [ 645.502984] env[61594]: DEBUG nova.network.neutron [-] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.503472] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 79cb42fa0a59401c95594a3976bbdd1c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.513354] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0fa3a0-4037-4393-8418-6c018227afad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.517099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79cb42fa0a59401c95594a3976bbdd1c [ 645.517697] env[61594]: INFO nova.compute.manager [-] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Took 1.64 seconds to deallocate network for instance. 
[ 645.527261] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da10702669d34ffcbbc4302ad200ad12 [ 645.528055] env[61594]: DEBUG nova.compute.claims [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 645.528415] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.531922] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75a8fc7-6bb9-4430-942a-4f38ca18f0ca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.567637] env[61594]: INFO nova.scheduler.client.report [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Deleted allocations for instance 0ce4707a-de75-438a-be72-d829478bfdff [ 645.573261] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08be02a1-0659-45d2-b5b4-bea94ed4286d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.577243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Expecting reply to msg 9652b23dd16d464a86301396246ef15e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.588869] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c234f49-6893-4f14-a352-4f335dba1cab {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.603314] env[61594]: DEBUG nova.compute.provider_tree [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.603808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg bae5b8501cba4f77b27c6e17e39609a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.607210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9652b23dd16d464a86301396246ef15e [ 645.607569] env[61594]: DEBUG oslo_concurrency.lockutils [None req-841ccec2-9ff5-4877-ba11-d94213a4e089 tempest-ServersAdminNegativeTestJSON-1310685096 tempest-ServersAdminNegativeTestJSON-1310685096-project-member] Lock "0ce4707a-de75-438a-be72-d829478bfdff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 9.434s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.623430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bae5b8501cba4f77b27c6e17e39609a4 [ 645.624526] env[61594]: DEBUG nova.scheduler.client.report [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 645.627945] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg f41fb4bb5fbb420aa250f970fb77c457 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.644119] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f41fb4bb5fbb420aa250f970fb77c457 [ 645.644998] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.645621] env[61594]: ERROR nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. 
[ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Traceback (most recent call last): [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.driver.spawn(context, instance, image_meta, [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] vm_ref = self.build_virtual_machine(instance, [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.645621] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] for vif in network_info: [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self._sync_wrapper(fn, *args, **kwargs) [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.wait() [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self[:] = self._gt.wait() [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self._exit_event.wait() [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] result = hub.switch() [ 645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
645.645967] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return self.greenlet.switch() [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] result = function(*args, **kwargs) [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] return func(*args, **kwargs) [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise e [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] nwinfo = self.network_api.allocate_for_instance( [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] created_port_ids = self._update_ports_for_instance( [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] with excutils.save_and_reraise_exception(): [ 645.646405] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] self.force_reraise() [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise self.value [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] updated_port = self._update_port( [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] _ensure_no_port_binding_failure(port) [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] raise exception.PortBindingFailed(port_id=port['id']) [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] nova.exception.PortBindingFailed: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. [ 645.646776] env[61594]: ERROR nova.compute.manager [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] [ 645.647090] env[61594]: DEBUG nova.compute.utils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.647755] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.119s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 645.652021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg aff432151b594c918503a56f7d07e5d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.652149] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Build of instance 00e11309-0f82-49d5-b4f5-02e2bdb517e8 was re-scheduled: Binding failed for port 2c49d0c6-a457-48eb-971f-541920c2e4f2, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 645.652526] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 645.652746] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquiring lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.652888] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Acquired lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.653062] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.653447] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 0169aa8a35014c2eae66835d36324790 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.661051] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0169aa8a35014c2eae66835d36324790 [ 645.684169] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.689425] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aff432151b594c918503a56f7d07e5d3 [ 645.757185] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.757725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg fcd381007d9740dfa17472a4960fdc8b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.768120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcd381007d9740dfa17472a4960fdc8b [ 645.769410] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Releasing lock "refresh_cache-00e11309-0f82-49d5-b4f5-02e2bdb517e8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 645.769730] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 645.769925] env[61594]: DEBUG nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 645.770111] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 645.788292] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b586e04-557c-4865-bebb-6e024143d34c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.792078] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.792615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 9ff07ac467334be789ab87abae65cfdd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.798621] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb4bdf9-adfc-4fa6-94fa-517c6e88893e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.802761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ff07ac467334be789ab87abae65cfdd [ 645.803239] env[61594]: DEBUG nova.network.neutron [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 645.803701] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 46454acdd3da490bba7bfb9416428ba7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.829341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46454acdd3da490bba7bfb9416428ba7 [ 645.830480] env[61594]: INFO nova.compute.manager [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] [instance: 00e11309-0f82-49d5-b4f5-02e2bdb517e8] Took 0.06 seconds to deallocate network for instance. 
[ 645.832141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 02307e6d6078479cad6dae3505a20e88 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.833573] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea8157f-b349-4c42-9dc4-2777ebcc8ecc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.841943] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6b862b-2802-4642-87ab-7c8ec60cfb41 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.856712] env[61594]: DEBUG nova.compute.provider_tree [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 645.857121] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 881a5335836f426e98d4995a79f2a077 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.866272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 881a5335836f426e98d4995a79f2a077 [ 645.867809] env[61594]: DEBUG nova.scheduler.client.report [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 645.870337] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 007ff5a563594f9398d55adc4c18e9e8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.872968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02307e6d6078479cad6dae3505a20e88 [ 645.875278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 46d35f910a5245b69d03f3f69ce3a89b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.886168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 007ff5a563594f9398d55adc4c18e9e8 [ 645.887099] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.239s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 645.888258] env[61594]: ERROR nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] Traceback (most recent call last): [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.driver.spawn(context, instance, image_meta, [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] vm_ref = self.build_virtual_machine(instance, [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] vif_infos = vmwarevif.get_vif_info(self._session, [ 645.888258] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] for vif in network_info: [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self._sync_wrapper(fn, *args, **kwargs) [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.wait() [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self[:] = self._gt.wait() [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 645.888957] 
env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self._exit_event.wait() [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] result = hub.switch() [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 645.888957] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return self.greenlet.switch() [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] result = function(*args, **kwargs) [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] return func(*args, **kwargs) [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise e [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] nwinfo = self.network_api.allocate_for_instance( [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] created_port_ids = self._update_ports_for_instance( [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] with excutils.save_and_reraise_exception(): [ 645.889478] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] self.force_reraise() [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise self.value [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] updated_port = self._update_port( [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] _ensure_no_port_binding_failure(port) [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] raise exception.PortBindingFailed(port_id=port['id']) [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] nova.exception.PortBindingFailed: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. [ 645.889823] env[61594]: ERROR nova.compute.manager [instance: 63bccd39-d951-4187-872d-559cd7fead30] [ 645.890145] env[61594]: DEBUG nova.compute.utils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 645.891818] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Build of instance 63bccd39-d951-4187-872d-559cd7fead30 was re-scheduled: Binding failed for port 855ab017-300e-4d43-a4cc-2423f56b7a11, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 645.892387] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 645.892952] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 645.893060] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquired lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.893243] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 645.893666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 38a431d08e8f432384a11be8fbce024c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 645.903207] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38a431d08e8f432384a11be8fbce024c [ 645.918708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46d35f910a5245b69d03f3f69ce3a89b [ 645.932977] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 645.945531] env[61594]: INFO nova.scheduler.client.report [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Deleted allocations for instance 00e11309-0f82-49d5-b4f5-02e2bdb517e8 [ 645.952899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Expecting reply to msg 8c6f37320f1542808cb0b659f326d839 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.170574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c6f37320f1542808cb0b659f326d839 [ 646.170574] env[61594]: DEBUG oslo_concurrency.lockutils [None req-988f1a66-15d7-4e5a-ae4f-01a37abef9e8 tempest-DeleteServersAdminTestJSON-829365191 tempest-DeleteServersAdminTestJSON-829365191-project-member] Lock "00e11309-0f82-49d5-b4f5-02e2bdb517e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.291s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.170574] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.170574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 185b110b006a472ea16a91344c198898 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.170574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 185b110b006a472ea16a91344c198898 [ 646.170574] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Releasing lock "refresh_cache-63bccd39-d951-4187-872d-559cd7fead30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 646.171261] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 646.171261] env[61594]: DEBUG nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 646.171261] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 646.171261] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 646.171261] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg fc152faa1463421589463cb0326dbb1b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.172116] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc152faa1463421589463cb0326dbb1b [ 646.172116] env[61594]: DEBUG nova.network.neutron [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.172116] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg db628f9436204298931ce32890370bfb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.180937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db628f9436204298931ce32890370bfb [ 646.181531] env[61594]: INFO nova.compute.manager [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 63bccd39-d951-4187-872d-559cd7fead30] Took 0.05 seconds to deallocate network for instance. 
[ 646.185024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 814989f8f73641d5872b6554a3054679 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.222800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 814989f8f73641d5872b6554a3054679 [ 646.228091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg f0acee7e8fd94aefa266ac9c4e5a946a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.280046] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0acee7e8fd94aefa266ac9c4e5a946a [ 646.311479] env[61594]: INFO nova.scheduler.client.report [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Deleted allocations for instance 63bccd39-d951-4187-872d-559cd7fead30 [ 646.319305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg fea2d787ae374b9598814ee3d995b2af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.343770] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fea2d787ae374b9598814ee3d995b2af [ 646.344423] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3a00aa79-4b70-4ea4-a6e7-a0e852900394 tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "63bccd39-d951-4187-872d-559cd7fead30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.431s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.499385] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquiring lock "5aa53314-d177-4e8a-a2f5-ae0db9d30a66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.499587] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "5aa53314-d177-4e8a-a2f5-ae0db9d30a66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.500112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg a93f2e3562904c129b96ad66588e03c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.511045] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a93f2e3562904c129b96ad66588e03c9 [ 646.511524] env[61594]: DEBUG nova.compute.manager [None 
req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 646.513300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 6a56fce5ceb94dbbb707fb04e1173b26 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.560727] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a56fce5ceb94dbbb707fb04e1173b26 [ 646.585456] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 646.585705] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 646.587781] env[61594]: INFO nova.compute.claims [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 646.590334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg b62e972130984104a16d10c5a6559b38 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.652974] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b62e972130984104a16d10c5a6559b38 [ 646.654835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 76881eaa68d44130a1816cfd20a7e1c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.666529] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76881eaa68d44130a1816cfd20a7e1c9 [ 646.736489] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5e35f2-17ad-4cab-849f-50c4001fd4a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.745526] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb535e2-57c7-40c7-9632-6f3c76d20501 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.780879] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5548b1a-b2e2-430e-9a64-d11a4d637255 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.791426] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa4c406-a9c6-4b3f-a29f-7fc4015c70d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.804080] env[61594]: DEBUG nova.compute.provider_tree [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.805012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 77b6b8cebdc04cd88fa292138ed1b445 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.826401] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77b6b8cebdc04cd88fa292138ed1b445 [ 646.827934] env[61594]: DEBUG nova.scheduler.client.report [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 646.830887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 5f49f41148304bb8b6fa62703e362ebd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.850531] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f49f41148304bb8b6fa62703e362ebd [ 646.851512] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.266s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.852019] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 646.855503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 3144eb34721e44ab9c67e5459016b0da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.915231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3144eb34721e44ab9c67e5459016b0da [ 646.916423] env[61594]: DEBUG nova.compute.utils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 646.917053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 8e90f9f4343c4ea1bc54aed43f5a18cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.918083] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 646.918319] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 646.936420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e90f9f4343c4ea1bc54aed43f5a18cc [ 646.937099] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 646.942027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 946553d3af7f4fce980953db3481b9de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 646.981246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 946553d3af7f4fce980953db3481b9de [ 646.984675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 526ae6ba55084f338c890b4ffe7407d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.029242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 526ae6ba55084f338c890b4ffe7407d7 [ 647.030577] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 647.062959] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.063222] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.063365] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.063550] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.063695] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Image pref 0:0:0 
{{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.063836] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.064128] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.064312] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.064478] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.064633] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.064799] env[61594]: DEBUG nova.virt.hardware [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.065945] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078736aa-75f3-4b12-bcd7-945c31a7e3cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.069726] env[61594]: DEBUG nova.policy [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3999c3b2af1f4bc089587acda9cbd4e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa911ec07ba14af1b645fa020802638c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 647.076630] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ff8fe41-8485-42de-88c6-443c35257b5a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.619748] env[61594]: DEBUG oslo_concurrency.lockutils 
[None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "b908bdf3-a20f-4156-9ece-8dc038c9f749" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.620087] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "b908bdf3-a20f-4156-9ece-8dc038c9f749" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.620642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg eb9b01a226964c0daa147eb03f9f0cd8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.634868] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb9b01a226964c0daa147eb03f9f0cd8 [ 647.635936] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 647.637425] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg de385b3926f24578954180ef7d21542f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.688204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de385b3926f24578954180ef7d21542f [ 647.696266] env[61594]: ERROR nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. 
[ 647.696266] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 647.696266] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.696266] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.696266] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.696266] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.696266] env[61594]: ERROR nova.compute.manager raise self.value [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.696266] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.696266] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.696266] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.698810] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.698810] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.698810] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. 
[ 647.698810] env[61594]: ERROR nova.compute.manager [ 647.698810] env[61594]: Traceback (most recent call last): [ 647.698810] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.698810] env[61594]: listener.cb(fileno) [ 647.698810] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 647.698810] env[61594]: result = function(*args, **kwargs) [ 647.698810] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.698810] env[61594]: return func(*args, **kwargs) [ 647.698810] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 647.698810] env[61594]: raise e [ 647.698810] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 647.698810] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 647.698810] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.698810] env[61594]: created_port_ids = self._update_ports_for_instance( [ 647.698810] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.698810] env[61594]: with excutils.save_and_reraise_exception(): [ 647.698810] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.698810] env[61594]: self.force_reraise() [ 647.698810] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.698810] env[61594]: raise self.value [ 647.698810] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.698810] env[61594]: updated_port = self._update_port( [ 647.698810] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.698810] env[61594]: _ensure_no_port_binding_failure(port) [ 647.698810] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.698810] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.700279] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. [ 647.700279] env[61594]: Removing descriptor: 22 [ 647.702138] env[61594]: ERROR nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. 
[ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Traceback (most recent call last): [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] yield resources [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.driver.spawn(context, instance, image_meta, [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] vm_ref = self.build_virtual_machine(instance, [ 647.702138] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] for vif in network_info: [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self._sync_wrapper(fn, *args, **kwargs) [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.wait() [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self[:] = self._gt.wait() [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self._exit_event.wait() [ 647.702539] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.702539] env[61594]: ERROR 
nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] result = hub.switch() [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self.greenlet.switch() [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] result = function(*args, **kwargs) [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return func(*args, **kwargs) [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise e [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] nwinfo = self.network_api.allocate_for_instance( [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] created_port_ids = self._update_ports_for_instance( [ 647.702906] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] with excutils.save_and_reraise_exception(): [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.force_reraise() [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise self.value [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] updated_port = self._update_port( [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.703268] 
env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] _ensure_no_port_binding_failure(port) [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise exception.PortBindingFailed(port_id=port['id']) [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. [ 647.703268] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] [ 647.703654] env[61594]: INFO nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Terminating instance [ 647.706140] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquiring lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.706415] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquired lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.706786] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 647.707329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 0c6271cf659b4586b2acb0f43728249b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.719666] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.719666] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.721093] env[61594]: INFO nova.compute.claims [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 
tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.722690] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 7f013c0785bf4e7fae76dc52d08e8906 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.724034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c6271cf659b4586b2acb0f43728249b [ 647.761453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f013c0785bf4e7fae76dc52d08e8906 [ 647.762684] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 4614efc1bf7348e89847bb9ad3281115 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.771336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4614efc1bf7348e89847bb9ad3281115 [ 647.776853] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.861430] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde89b43-590c-44d2-b6d7-57ca1954725e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.869592] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b84eb12-e6bb-40b4-9e6e-c42fd8758c3c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.903179] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12beb0d-f36b-44be-9188-50a446319b3e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.911443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f00ad3f5-62a3-46c4-bc9a-77fceb0c3571 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.925330] env[61594]: DEBUG nova.compute.provider_tree [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 647.925856] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 14f77202b0e94afaa200d4f3abaee267 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.934827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14f77202b0e94afaa200d4f3abaee267 [ 647.935913] 
env[61594]: DEBUG nova.scheduler.client.report [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 647.938360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg daf33fc658434350bfd8c2ddbf3c7c6c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.953184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daf33fc658434350bfd8c2ddbf3c7c6c [ 647.954419] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.236s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.954507] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 647.957511] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 03731e457f794685adff3080226f49fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.993982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03731e457f794685adff3080226f49fb [ 647.995355] env[61594]: DEBUG nova.compute.utils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.995932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg b4e608c98317407ea1cfca2874b32c5f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 647.996996] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 647.997246] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.012248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4e608c98317407ea1cfca2874b32c5f [ 648.013268] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 648.015254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg e1e66242aacb4d99a89557b07cad9931 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.051958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1e66242aacb4d99a89557b07cad9931 [ 648.057898] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 3681279db2794baab1400057ab1023a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.077103] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.077103] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg e3273868f0b24da59ff0beabb0fc4af2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.088603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3273868f0b24da59ff0beabb0fc4af2 [ 648.089706] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Releasing lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.090047] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 648.090265] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 648.091378] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8154df2-5855-4e92-9336-f8c6a21b2cfc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.100719] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd119020-543c-4c9c-b3bf-da175f584cfb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.113540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3681279db2794baab1400057ab1023a4 [ 648.116366] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 648.130994] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1385849e-9e64-4062-b6aa-300e6e7eab3d could not be found. [ 648.132361] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 648.132361] env[61594]: INFO nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 648.132361] env[61594]: DEBUG oslo.service.loopingcall [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 648.132361] env[61594]: DEBUG nova.compute.manager [-] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 648.132361] env[61594]: DEBUG nova.network.neutron [-] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.151469] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 648.151746] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 648.151955] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 648.152666] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 648.152666] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 648.152666] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 648.152666] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 648.152808] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 648.152914] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 648.153088] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 648.153263] env[61594]: DEBUG nova.virt.hardware [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 648.154146] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6d3677-c435-4535-b19e-aafd5b338b9a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.162718] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c6831f-8516-4392-8d42-13041a1e1c57 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.190257] env[61594]: DEBUG nova.network.neutron [-] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.190781] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b14b6592c1ef48a6aceed871d4198423 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.199581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b14b6592c1ef48a6aceed871d4198423 [ 648.199887] env[61594]: DEBUG nova.network.neutron [-] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.200428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 802b19c1fa274f3abe8830c05604e6bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.216409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 802b19c1fa274f3abe8830c05604e6bb [ 648.216409] env[61594]: INFO nova.compute.manager [-] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Took 0.08 seconds to deallocate network for instance. 
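The nova.virt.hardware DEBUG lines above show the m1.nano flavor (1 vCPU, no explicit limits or preferences) being reduced to the single topology cores=1,sockets=1,threads=1. A simplified, hypothetical sketch of that enumeration step, assuming possible topologies are just the (sockets, cores, threads) factorizations of the vCPU count that fit within the limits (the real code also orders them by preference):

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate every (sockets, cores, threads) split whose product is vcpus
    # and that respects the per-dimension maximums.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For 1 vCPU under the 65536/65536/65536 limits seen in the log this yields
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]
print(possible_cpu_topologies(1, 65536, 65536, 65536))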
[ 648.218113] env[61594]: DEBUG nova.compute.claims [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 648.218232] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.218491] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.220818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 4a1d9c7d76bc40d7b55d11556131fdd3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.279692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a1d9c7d76bc40d7b55d11556131fdd3 [ 648.293411] env[61594]: DEBUG nova.policy [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b7538aa389a419f9928928eb07ee622', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '751185c006ea414ea0ce2117380f13f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 648.401131] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717c5af5-47e3-48ad-9f4c-2df9e7c8bd01 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.411434] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2144764b-e909-437b-afb8-4c2b097b46de {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.447795] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64c2483-5bba-4fbf-8e84-630063b33fb4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.454948] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40fbfbe-51a7-4a5f-a0ab-c42734fcf317 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.471268] env[61594]: DEBUG 
nova.compute.provider_tree [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.471815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 9225b65604a84108bc6ee35342947da5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.492534] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9225b65604a84108bc6ee35342947da5 [ 648.494840] env[61594]: DEBUG nova.scheduler.client.report [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 648.495471] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg b8b12087ae8a401b96a3c8d5424ad3af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.518058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8b12087ae8a401b96a3c8d5424ad3af [ 648.520975] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.301s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.520975] env[61594]: ERROR nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. 
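The inventory dict logged above for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be can be read with the usual Placement capacity rule, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A small illustrative sketch using the values from the log line (an aid for reading the report, not code from Nova):

def capacity(total, reserved, allocation_ratio):
    # Effective schedulable capacity for one resource class.
    return int((total - reserved) * allocation_ratio)

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
}

for rc, inv in inventory.items():
    # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable units, at most 16 per allocation
    print(rc, capacity(inv['total'], inv['reserved'], inv['allocation_ratio']),
          'max per allocation:', inv['max_unit'])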
[ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Traceback (most recent call last): [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.driver.spawn(context, instance, image_meta, [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 648.520975] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] vm_ref = self.build_virtual_machine(instance, [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] vif_infos = vmwarevif.get_vif_info(self._session, [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] for vif in network_info: [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self._sync_wrapper(fn, *args, **kwargs) [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.wait() [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self[:] = self._gt.wait() [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self._exit_event.wait() [ 648.522649] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] result = hub.switch() [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return self.greenlet.switch() [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] result = function(*args, **kwargs) [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] return func(*args, **kwargs) [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise e [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] nwinfo = self.network_api.allocate_for_instance( [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 648.522997] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] created_port_ids = self._update_ports_for_instance( [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] with excutils.save_and_reraise_exception(): [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] self.force_reraise() [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise self.value [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] updated_port = self._update_port( [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] _ensure_no_port_binding_failure(port) [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 648.523345] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] raise exception.PortBindingFailed(port_id=port['id']) [ 648.523782] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] nova.exception.PortBindingFailed: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. [ 648.523782] env[61594]: ERROR nova.compute.manager [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] [ 648.523782] env[61594]: DEBUG nova.compute.utils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 648.523782] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Build of instance 1385849e-9e64-4062-b6aa-300e6e7eab3d was re-scheduled: Binding failed for port 17510c18-0039-426c-a5b8-1f817ff57d52, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 648.524085] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 648.524881] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquiring lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 648.524881] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Acquired lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.524881] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 648.525030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg e663ab3906b24ec98ed1f3256d6a1a0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.539309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e663ab3906b24ec98ed1f3256d6a1a0c [ 648.574881] env[61594]: DEBUG 
nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Successfully created port: 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.602241] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.727235] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "c0b7aef6-0633-420d-906b-818bcc54072e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.727235] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "c0b7aef6-0633-420d-906b-818bcc54072e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.727235] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a8d5576cbf834cceb4afabf0673fd0f8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.739274] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8d5576cbf834cceb4afabf0673fd0f8 [ 648.741260] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 648.742431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 36d1c1a08c934724863b9cbc17028d00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.797522] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36d1c1a08c934724863b9cbc17028d00 [ 648.827941] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 648.828427] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.833058] env[61594]: INFO nova.compute.claims [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 648.833058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 93d749bafe2c4348a57dbfea25140e45 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.869942] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93d749bafe2c4348a57dbfea25140e45 [ 648.872169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg e04c2ad116454cfcb1e08a878b5a561d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.885514] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.886243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg b9c48c3ffeff49bf90bc08b4e9a4603b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.888867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e04c2ad116454cfcb1e08a878b5a561d [ 648.901773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9c48c3ffeff49bf90bc08b4e9a4603b [ 648.902395] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Releasing lock "refresh_cache-1385849e-9e64-4062-b6aa-300e6e7eab3d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.902613] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 648.902892] env[61594]: DEBUG nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 648.902981] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 648.959339] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 648.961266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 85969751be324ac28050168e4523176e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.976350] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85969751be324ac28050168e4523176e [ 648.977124] env[61594]: DEBUG nova.network.neutron [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.977428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 6a025b32dd354f30bb27ae2d9128ec37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 648.995911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a025b32dd354f30bb27ae2d9128ec37 [ 648.995911] env[61594]: INFO nova.compute.manager [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] [instance: 1385849e-9e64-4062-b6aa-300e6e7eab3d] Took 0.09 seconds to deallocate network for instance. 
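The recurring "Acquiring lock ... / Lock ... acquired / released" DEBUG lines in this log come from oslo.concurrency's locking wrappers (the lockutils.py inner/lock frames cited in each entry). A minimal usage sketch of the same pattern, with names chosen purely for illustration rather than copied from Nova:

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Runs with the named lock held; oslo.concurrency logs the acquire/release
    # at DEBUG, much like the compute_resources lock lines above.
    return instance_uuid

print(claim('b908bdf3-a20f-4156-9ece-8dc038c9f749'))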
[ 648.996911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 9b824c70e7bb4be4b89d24467dd81a84 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.028362] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f806e524-f7e6-49f8-9c83-09a28e660262 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.036869] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ff317e-24a9-4d7b-a9e1-8b502e0dc291 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.078050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b824c70e7bb4be4b89d24467dd81a84 [ 649.081403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 9392ec8029f24c5f9ccc7b721aea7bb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.086019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c18b11-98d3-41f4-98fc-6dbf2e0c1a84 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.091792] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b459547-a59d-4fd1-840a-d59c83b00423 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.106492] env[61594]: DEBUG nova.compute.provider_tree [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.106981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b809630a7c1045c0aa623008e271aa6a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.125728] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b809630a7c1045c0aa623008e271aa6a [ 649.125728] env[61594]: DEBUG nova.scheduler.client.report [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 649.129707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 77ff017a4594493d9eea7f87da3aced7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.130921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9392ec8029f24c5f9ccc7b721aea7bb4 [ 649.152736] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77ff017a4594493d9eea7f87da3aced7 [ 649.155713] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.326s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.155871] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 649.157761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7945b961398541a8943cfac5d353986e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.179829] env[61594]: INFO nova.scheduler.client.report [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Deleted allocations for instance 1385849e-9e64-4062-b6aa-300e6e7eab3d [ 649.188962] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Expecting reply to msg 253ad697a3734e8eba71e20eccb24b3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.216500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7945b961398541a8943cfac5d353986e [ 649.219177] env[61594]: DEBUG nova.compute.utils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 649.221157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg f457bb9a932648a99a7cb8415dac3f77 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.226305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 253ad697a3734e8eba71e20eccb24b3a [ 649.226986] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 649.227150] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 649.229583] env[61594]: DEBUG oslo_concurrency.lockutils [None req-05cc0ecb-8de9-477d-93f0-f68b8cf46bcc tempest-VolumesAssistedSnapshotsTest-541539088 tempest-VolumesAssistedSnapshotsTest-541539088-project-member] Lock "1385849e-9e64-4062-b6aa-300e6e7eab3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 6.718s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.244843] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f457bb9a932648a99a7cb8415dac3f77 [ 649.245814] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 649.247542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 08d8102ce4cb454d8ba51c119d889389 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.287141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08d8102ce4cb454d8ba51c119d889389 [ 649.291548] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 9f6ca0204e7740399f15bd0870dabcc1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 649.341331] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f6ca0204e7740399f15bd0870dabcc1 [ 649.342638] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 649.382414] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.382414] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.382414] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.383272] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.383272] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.383473] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.383939] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.384201] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.385193] env[61594]: DEBUG 
nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.385193] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.385193] env[61594]: DEBUG nova.virt.hardware [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.387488] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b9dcb2-cfd6-4588-9311-903c8dae7cc9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.398314] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327a26e1-0b37-48e6-8e1a-3dbec7a37d70 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.612976] env[61594]: DEBUG nova.policy [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee5a21ff43314c1a857f6958056173f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afc5e909ec5c4dd983ece5aa3236910f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 650.213407] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Successfully created port: 2caa9152-e1cc-4d04-9e73-221b56b1f580 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 650.587666] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "a07bc185-7541-4a42-8c83-76cdcf157167" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.587929] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "a07bc185-7541-4a42-8c83-76cdcf157167" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.588435] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg dbc296fe9c904f56a34ee2894f68a343 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.599018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbc296fe9c904f56a34ee2894f68a343 [ 650.599578] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 650.602429] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 3e8beaaad75543ddad7984f892ae4a1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.637492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e8beaaad75543ddad7984f892ae4a1d [ 650.659415] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.659415] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.661966] env[61594]: INFO nova.compute.claims [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.665981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 62ae6d2b6a894714a7f5179876e052c7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.697440] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62ae6d2b6a894714a7f5179876e052c7 [ 650.701539] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 643cc9384bfc4bc68dea200c446c107d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.712085] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 643cc9384bfc4bc68dea200c446c107d [ 650.821331] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb80b3d-f8c8-4a55-97bf-d4d2a91140d9 {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.829806] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f615ee67-1e22-4158-be13-fe2d1a3dd910 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.869832] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e535900-1d04-4af3-8ac1-58473c560903 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.880045] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e39183-3f2c-4a44-ab22-aeaff87f68d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.898961] env[61594]: DEBUG nova.compute.provider_tree [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.899708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 5e32b5b168304b2aabe2d10ebd36d012 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.917704] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e32b5b168304b2aabe2d10ebd36d012 [ 650.918791] env[61594]: DEBUG nova.scheduler.client.report [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 650.921252] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 64276038a47444a3863de2188edf1997 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.937081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64276038a47444a3863de2188edf1997 [ 650.937967] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 650.938460] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] 
[instance: a07bc185-7541-4a42-8c83-76cdcf157167] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 650.940757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 6965e3987f8b48ddb450783ed7c74111 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.981254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6965e3987f8b48ddb450783ed7c74111 [ 650.982763] env[61594]: DEBUG nova.compute.utils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 650.983370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg cf6ad50a34184bd1bc88ef0bd3d77edc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 650.988411] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 650.988411] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 650.997887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf6ad50a34184bd1bc88ef0bd3d77edc [ 650.998526] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 651.000504] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg d2023fcc28944e4d9b0b2a16630ea6b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 651.045620] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2023fcc28944e4d9b0b2a16630ea6b7 [ 651.049588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 1ac5219cc51a4cab85b77a12e96ec279 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 651.097698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ac5219cc51a4cab85b77a12e96ec279 [ 651.101240] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 651.134935] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 651.134935] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 651.134935] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 651.135430] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 651.135430] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Image pref 0:0:0 {{(pid=61594) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 651.135430] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 651.135430] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 651.135430] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 651.135895] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 651.136269] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 651.136632] env[61594]: DEBUG nova.virt.hardware [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 651.137946] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f40e308d-edc2-4d93-89a3-ad7e7b93ebac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.146635] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8e50e9d-014a-4391-945e-481563d83e2c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.166900] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Successfully created port: 07c16f5c-e943-48a0-81a3-e7f9ee9ca363 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.471142] env[61594]: DEBUG nova.policy [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b7538aa389a419f9928928eb07ee622', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'751185c006ea414ea0ce2117380f13f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 653.863493] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Successfully created port: ee5f9094-c2bd-4d83-b04f-0fb72ece6737 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.113696] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "03828801-4d8a-47dc-957e-f1aa64b652da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.113696] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "03828801-4d8a-47dc-957e-f1aa64b652da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.113696] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg cfb491afdb804be7a4292155dfddad86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.131771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb491afdb804be7a4292155dfddad86 [ 656.131771] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 656.131771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg a7a8f406df1943e9984489fb7141ae20 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.166957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7a8f406df1943e9984489fb7141ae20 [ 656.185983] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 656.186333] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.188766] env[61594]: INFO nova.compute.claims [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 656.191293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg b0dd240541f748edb6969405e31c2c60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.232760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0dd240541f748edb6969405e31c2c60 [ 656.234623] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 115feedfa17f4419a43b697abd527afa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.250016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 115feedfa17f4419a43b697abd527afa [ 656.385015] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b045552c-8ce8-4f61-8ef1-41b65c76e1ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.397849] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3223d2a-4abb-4a64-9600-b4af9c5dc31e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.433538] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f5cf89-5866-46e4-bbc9-9a0982a38233 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.441850] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29b3ae97-428e-44e5-a8f6-33f537459d53 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.455850] env[61594]: DEBUG nova.compute.provider_tree [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.456104] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 0b27758513104920b7ad0e05e4df7b4e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.464102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b27758513104920b7ad0e05e4df7b4e [ 656.465040] env[61594]: DEBUG nova.scheduler.client.report [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 656.467298] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 6290183003414a519f5f017a941b8afd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.479648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6290183003414a519f5f017a941b8afd [ 656.480689] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 656.481119] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 656.482810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg e6f36d18c7724816b25802129d296997 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.536124] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6f36d18c7724816b25802129d296997 [ 656.538147] env[61594]: DEBUG nova.compute.utils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.538493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 4668debf3e544ab79f443e29c7785ec7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.539450] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 656.539663] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.552957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4668debf3e544ab79f443e29c7785ec7 [ 656.553553] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 656.555221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 98a1100cf35744d084a716be8c911896 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.587235] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98a1100cf35744d084a716be8c911896 [ 656.590993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 40923ceca4834374bfcebd55604e8eb8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 656.626250] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40923ceca4834374bfcebd55604e8eb8 [ 656.627400] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 656.658531] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 656.658783] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 656.658941] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 656.659139] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 656.659289] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 
tempest-VolumesAdminNegativeTest-225605931-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 656.659439] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 656.659648] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 656.659809] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 656.659972] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 656.660189] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 656.660366] env[61594]: DEBUG nova.virt.hardware [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 656.661252] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412e9578-e48f-4ce6-b021-89282d77164a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.670716] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce03800a-5099-46f2-a933-170fa46de6d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.950583] env[61594]: DEBUG nova.policy [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a893b6f20a704d77968791ca48532894', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b410a9b0a0a54c00bbbaa088cd81b957', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) 
authorize /opt/stack/nova/nova/policy.py:203}} [ 657.539204] env[61594]: DEBUG nova.compute.manager [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Received event network-changed-1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 657.540218] env[61594]: DEBUG nova.compute.manager [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Refreshing instance network info cache due to event network-changed-1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 657.540218] env[61594]: DEBUG oslo_concurrency.lockutils [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] Acquiring lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.540218] env[61594]: DEBUG oslo_concurrency.lockutils [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] Acquired lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.540218] env[61594]: DEBUG nova.network.neutron [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Refreshing network info cache for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 657.542241] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] Expecting reply to msg e9383bee9a6d419699291ffd4fe31364 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 657.559486] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9383bee9a6d419699291ffd4fe31364 [ 657.721907] env[61594]: DEBUG nova.network.neutron [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 657.734687] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquiring lock "467ba147-20b6-41eb-852f-4097cb45ba6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.734687] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "467ba147-20b6-41eb-852f-4097cb45ba6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.734687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 48895de89ddf451dbf6be4a4b864d4a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 657.750483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48895de89ddf451dbf6be4a4b864d4a5 [ 657.751179] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 657.753902] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg dcf639da7e4f47659de1104a3df16451 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 657.815352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcf639da7e4f47659de1104a3df16451 [ 657.848869] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.850999] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.852313] env[61594]: INFO nova.compute.claims [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.855857] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 8a1b50fb80994985aea8302d2a239fe7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 657.919746] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a1b50fb80994985aea8302d2a239fe7 [ 657.919746] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 619762677f2e409b89e94c3a43538d7b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 657.932237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 619762677f2e409b89e94c3a43538d7b [ 658.066900] env[61594]: ERROR nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. 
[ 658.066900] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 658.066900] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.066900] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.066900] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.066900] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.066900] env[61594]: ERROR nova.compute.manager raise self.value [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.066900] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 658.066900] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.066900] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 658.067522] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.067522] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 658.067522] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. 
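The traceback above ends in nova/network/neutron.py's _ensure_no_port_binding_failure() raising PortBindingFailed after Neutron reported a failed binding for the port. The following is a simplified, self-contained sketch of that check pattern, not Nova's actual code; the 'binding:vif_type' key and the 'binding_failed' sentinel are assumptions based on the standard Neutron port-binding extension.

```python
# Simplified sketch (not Nova's implementation) of the check exercised in the
# traceback above: after a port update, inspect the binding Neutron reported
# and raise PortBindingFailed if the binding did not succeed.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # Assumption: a failed binding is signalled via binding:vif_type.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port['id'])


# Example using the port id from the log entry above.
port = {'id': '1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # mirrors the error message seen in the log
```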
[ 658.067522] env[61594]: ERROR nova.compute.manager [ 658.067985] env[61594]: Traceback (most recent call last): [ 658.068098] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 658.068098] env[61594]: listener.cb(fileno) [ 658.068184] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 658.068184] env[61594]: result = function(*args, **kwargs) [ 658.068256] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.068256] env[61594]: return func(*args, **kwargs) [ 658.068322] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 658.068322] env[61594]: raise e [ 658.068390] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 658.068390] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 658.068451] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.068451] env[61594]: created_port_ids = self._update_ports_for_instance( [ 658.068524] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.068524] env[61594]: with excutils.save_and_reraise_exception(): [ 658.068594] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.068594] env[61594]: self.force_reraise() [ 658.068659] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.068659] env[61594]: raise self.value [ 658.068721] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.068721] env[61594]: updated_port = self._update_port( [ 658.068785] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.068785] env[61594]: _ensure_no_port_binding_failure(port) [ 658.069167] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.069167] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 658.069167] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. [ 658.069167] env[61594]: Removing descriptor: 19 [ 658.072020] env[61594]: ERROR nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. 
[ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Traceback (most recent call last): [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] yield resources [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.driver.spawn(context, instance, image_meta, [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] vm_ref = self.build_virtual_machine(instance, [ 658.072020] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] vif_infos = vmwarevif.get_vif_info(self._session, [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] for vif in network_info: [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self._sync_wrapper(fn, *args, **kwargs) [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.wait() [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self[:] = self._gt.wait() [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self._exit_event.wait() [ 658.072448] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 658.072448] env[61594]: ERROR 
nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] result = hub.switch() [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self.greenlet.switch() [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] result = function(*args, **kwargs) [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return func(*args, **kwargs) [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise e [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] nwinfo = self.network_api.allocate_for_instance( [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] created_port_ids = self._update_ports_for_instance( [ 658.072897] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] with excutils.save_and_reraise_exception(): [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.force_reraise() [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise self.value [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] updated_port = self._update_port( [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 658.073288] 
env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] _ensure_no_port_binding_failure(port) [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise exception.PortBindingFailed(port_id=port['id']) [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. [ 658.073288] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] [ 658.073633] env[61594]: INFO nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Terminating instance [ 658.075118] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquiring lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.103754] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa55b6d0-5996-49b0-a777-c4258e1f955d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.112297] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a870ef46-7fc2-4380-ad56-b030911e6ca3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.150322] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9f4742-cdc4-4cd6-a996-708f2bbd33e5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.159051] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5b2be1-672c-451f-ac53-96c05ff3acec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.175506] env[61594]: DEBUG nova.compute.provider_tree [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.177543] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 9ba66535a6fe499095dd804de2d1c719 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.193411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ba66535a6fe499095dd804de2d1c719 [ 658.193411] env[61594]: DEBUG nova.scheduler.client.report [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c 
tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 658.195836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg e60ecef801e548d6828d9c80d7bd1bd5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.232836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e60ecef801e548d6828d9c80d7bd1bd5 [ 658.233722] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.384s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.234783] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 658.237128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg f7848714b2f54021adf415d7f04f1f0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.311241] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7848714b2f54021adf415d7f04f1f0c [ 658.312807] env[61594]: DEBUG nova.compute.utils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 658.313423] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 29dc00916418415cb9000e06dd8ae851 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.314320] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 658.314558] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 658.330974] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29dc00916418415cb9000e06dd8ae851 [ 658.332477] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 658.335662] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 3b8ed8f6069747ef822e9f39efeeaecd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.411402] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b8ed8f6069747ef822e9f39efeeaecd [ 658.417846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 414b453122a343bd8973fc92f209030b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.504595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 414b453122a343bd8973fc92f209030b [ 658.504595] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 658.542965] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:27:52Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1748289917',id=25,is_public=True,memory_mb=128,name='tempest-flavor_with_ephemeral_0-76535309',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 658.543295] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 658.543440] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.543798] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 658.543798] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.544219] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 658.545507] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 658.545590] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 
tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 658.546413] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 658.546706] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 658.547764] env[61594]: DEBUG nova.virt.hardware [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 658.550301] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15dcfe98-c5cc-4c8f-b7d4-5a1f1262828d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.561500] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5d54d1-2798-4db4-8fc7-ef7a8fad525f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.667667] env[61594]: DEBUG nova.compute.manager [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Received event network-changed-07c16f5c-e943-48a0-81a3-e7f9ee9ca363 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 658.667667] env[61594]: DEBUG nova.compute.manager [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Refreshing instance network info cache due to event network-changed-07c16f5c-e943-48a0-81a3-e7f9ee9ca363. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 658.667667] env[61594]: DEBUG oslo_concurrency.lockutils [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] Acquiring lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.667667] env[61594]: DEBUG oslo_concurrency.lockutils [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] Acquired lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.667667] env[61594]: DEBUG nova.network.neutron [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Refreshing network info cache for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 658.670692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] Expecting reply to msg eacef03f1bce42d99e7fa318471eecc0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.676439] env[61594]: DEBUG nova.network.neutron [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.677127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] Expecting reply to msg 078b5d5e08eb4fd28fbc933f61722f04 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.692430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eacef03f1bce42d99e7fa318471eecc0 [ 658.697482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 078b5d5e08eb4fd28fbc933f61722f04 [ 658.703040] env[61594]: DEBUG oslo_concurrency.lockutils [req-0dbf3cac-3f0f-411f-8884-c2d6f156126a req-8dfa94b3-f2c0-467f-8f1b-1503ea2aec9d service nova] Releasing lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.704020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquired lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.704020] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 658.704755] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to 
msg 63d9e3a4f09443a19d862ceb45926cb2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 658.712019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63d9e3a4f09443a19d862ceb45926cb2 [ 658.897366] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.908705] env[61594]: DEBUG nova.network.neutron [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.996708] env[61594]: DEBUG nova.policy [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5510866e61643bcb2d9c401fa9b9426', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c54606a0dde443f699e33acda0182e64', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 659.267906] env[61594]: ERROR nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. 
[ 659.267906] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 659.267906] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 659.267906] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 659.267906] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.267906] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.267906] env[61594]: ERROR nova.compute.manager raise self.value [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 659.267906] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 659.267906] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.267906] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 659.272527] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.272527] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 659.272527] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. 
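Both tracebacks also show oslo.utils' save_and_reraise_exception() context manager (excutils.py, with force_reraise() in __exit__): it lets cleanup code run inside an except block and then re-raises the original exception so the failure is not swallowed. Below is a minimal illustration of that pattern under stated assumptions; it requires oslo.utils to be installed, and update_port/cleanup are hypothetical stand-ins, not Nova functions.

```python
# Minimal illustration of the oslo.utils pattern visible in the traceback:
# run cleanup on failure, then re-raise the original exception unchanged.

from oslo_utils import excutils


def update_port_or_cleanup(update_port, cleanup, port_id):
    try:
        return update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs before the captured exception is re-raised; the original
            # traceback is preserved by the context manager on exit.
            cleanup(port_id)
```

This is why the log shows the same PortBindingFailed propagating out of _update_ports_for_instance() even though intermediate cleanup frames (force_reraise, raise self.value) appear in the stack.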
[ 659.272527] env[61594]: ERROR nova.compute.manager [ 659.272527] env[61594]: Traceback (most recent call last): [ 659.272527] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 659.272527] env[61594]: listener.cb(fileno) [ 659.272527] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 659.272527] env[61594]: result = function(*args, **kwargs) [ 659.272527] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.272527] env[61594]: return func(*args, **kwargs) [ 659.272527] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 659.272527] env[61594]: raise e [ 659.272527] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 659.272527] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 659.272527] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 659.272527] env[61594]: created_port_ids = self._update_ports_for_instance( [ 659.272527] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 659.272527] env[61594]: with excutils.save_and_reraise_exception(): [ 659.272527] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.272527] env[61594]: self.force_reraise() [ 659.272527] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.272527] env[61594]: raise self.value [ 659.272527] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 659.272527] env[61594]: updated_port = self._update_port( [ 659.272527] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.272527] env[61594]: _ensure_no_port_binding_failure(port) [ 659.272527] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.272527] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 659.273589] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. [ 659.273589] env[61594]: Removing descriptor: 22 [ 659.273589] env[61594]: ERROR nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. 
[ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Traceback (most recent call last): [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] yield resources [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.driver.spawn(context, instance, image_meta, [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 659.273589] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] vm_ref = self.build_virtual_machine(instance, [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] vif_infos = vmwarevif.get_vif_info(self._session, [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] for vif in network_info: [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self._sync_wrapper(fn, *args, **kwargs) [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.wait() [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self[:] = self._gt.wait() [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self._exit_event.wait() [ 659.274122] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 659.275563] env[61594]: ERROR 
nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] result = hub.switch() [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self.greenlet.switch() [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] result = function(*args, **kwargs) [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return func(*args, **kwargs) [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise e [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] nwinfo = self.network_api.allocate_for_instance( [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 659.275563] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] created_port_ids = self._update_ports_for_instance( [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] with excutils.save_and_reraise_exception(): [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.force_reraise() [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise self.value [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] updated_port = self._update_port( [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 659.275942] 
env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] _ensure_no_port_binding_failure(port) [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 659.275942] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise exception.PortBindingFailed(port_id=port['id']) [ 659.276498] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. [ 659.276498] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] [ 659.276498] env[61594]: INFO nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Terminating instance [ 659.284641] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 659.979793] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.979793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg d80a831e74b24da9a324d8376475f00c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 659.987882] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d80a831e74b24da9a324d8376475f00c [ 659.988550] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Releasing lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 659.988984] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 659.989223] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 659.989761] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d7a137dd-0841-45a4-93a1-94a97469b990 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.993483] env[61594]: DEBUG nova.network.neutron [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.993995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] Expecting reply to msg 57f0980fabc54ad79d5eddf9f17d4022 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.003699] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bb6081-6362-4548-b462-9161e1ce5f6a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.020082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57f0980fabc54ad79d5eddf9f17d4022 [ 660.020082] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Successfully created port: ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 660.021323] env[61594]: DEBUG oslo_concurrency.lockutils [req-91601693-74ac-4c90-b9c3-d0d3e34fde7d req-4d6e2f93-871e-46a8-bfa8-cc30f4d4aa2a service nova] Releasing lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.022833] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.023057] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 660.023487] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6424a925fdfe4bafa541e03c3404ebc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.038504] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6424a925fdfe4bafa541e03c3404ebc6 [ 660.039284] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5aa53314-d177-4e8a-a2f5-ae0db9d30a66 could not be found. [ 660.039492] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 660.039796] env[61594]: INFO nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Took 0.05 seconds to destroy the instance on the hypervisor. [ 660.040082] env[61594]: DEBUG oslo.service.loopingcall [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.042756] env[61594]: DEBUG nova.compute.manager [-] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 660.042911] env[61594]: DEBUG nova.network.neutron [-] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 660.139069] env[61594]: DEBUG nova.network.neutron [-] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.140140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg efedc477872d4bf0a16421b64ddbf847 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.150594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efedc477872d4bf0a16421b64ddbf847 [ 660.150594] env[61594]: DEBUG nova.network.neutron [-] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.150594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 94c69de5cc5947d982f6eb64f24a033c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.161119] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94c69de5cc5947d982f6eb64f24a033c [ 660.161535] env[61594]: INFO nova.compute.manager [-] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Took 0.12 seconds to deallocate network for instance. [ 660.162517] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.169457] env[61594]: DEBUG nova.compute.claims [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 660.169705] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 660.169959] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 660.172482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 75320288ae974bffaa9013a2b1f137a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.244621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75320288ae974bffaa9013a2b1f137a9 [ 660.441746] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfdbde8-2444-4323-90f4-e36c46a958be {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.456450] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-544a5a57-0ae3-4242-b4ad-98ee3b01b1d4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.491404] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e23755-dcff-4f16-bd42-7e8f2c1f5537 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.498173] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d14034-a7f7-4619-9c40-2c34155df398 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.513631] env[61594]: DEBUG nova.compute.provider_tree [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.514173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 319b8ae6393c42469ea5a3af4402559f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.524118] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 319b8ae6393c42469ea5a3af4402559f [ 660.524715] env[61594]: DEBUG nova.scheduler.client.report [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 660.526941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 16daa40e0f9e444fa2a77cf65e42556f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.550588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16daa40e0f9e444fa2a77cf65e42556f [ 660.551442] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.381s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.552059] env[61594]: ERROR nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. 
[ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Traceback (most recent call last): [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.driver.spawn(context, instance, image_meta, [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self._vmops.spawn(context, instance, image_meta, injected_files, [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] vm_ref = self.build_virtual_machine(instance, [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] vif_infos = vmwarevif.get_vif_info(self._session, [ 660.552059] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] for vif in network_info: [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self._sync_wrapper(fn, *args, **kwargs) [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.wait() [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self[:] = self._gt.wait() [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self._exit_event.wait() [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] result = hub.switch() [ 660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
660.552565] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return self.greenlet.switch() [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] result = function(*args, **kwargs) [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] return func(*args, **kwargs) [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise e [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] nwinfo = self.network_api.allocate_for_instance( [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] created_port_ids = self._update_ports_for_instance( [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] with excutils.save_and_reraise_exception(): [ 660.553146] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] self.force_reraise() [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise self.value [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] updated_port = self._update_port( [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] _ensure_no_port_binding_failure(port) [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] raise exception.PortBindingFailed(port_id=port['id']) [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] nova.exception.PortBindingFailed: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. [ 660.553605] env[61594]: ERROR nova.compute.manager [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] [ 660.554042] env[61594]: DEBUG nova.compute.utils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 660.554212] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Build of instance 5aa53314-d177-4e8a-a2f5-ae0db9d30a66 was re-scheduled: Binding failed for port 1ecca70e-b1c0-4563-8b14-d6cdc6b92ebe, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 660.554631] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 660.554852] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquiring lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.554998] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Acquired lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.555174] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 660.555569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 255ace253b5b44c7b1cc4668db967022 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.567253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 255ace253b5b44c7b1cc4668db967022 [ 660.745285] env[61594]: ERROR nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 
tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. [ 660.745285] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 660.745285] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.745285] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.745285] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.745285] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.745285] env[61594]: ERROR nova.compute.manager raise self.value [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.745285] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 660.745285] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.745285] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 660.745884] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.745884] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 660.745884] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. 
[ 660.745884] env[61594]: ERROR nova.compute.manager [ 660.745884] env[61594]: Traceback (most recent call last): [ 660.745884] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 660.745884] env[61594]: listener.cb(fileno) [ 660.745884] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 660.745884] env[61594]: result = function(*args, **kwargs) [ 660.745884] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.745884] env[61594]: return func(*args, **kwargs) [ 660.745884] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 660.745884] env[61594]: raise e [ 660.745884] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 660.745884] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 660.745884] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.745884] env[61594]: created_port_ids = self._update_ports_for_instance( [ 660.745884] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.745884] env[61594]: with excutils.save_and_reraise_exception(): [ 660.745884] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.745884] env[61594]: self.force_reraise() [ 660.745884] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.745884] env[61594]: raise self.value [ 660.745884] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.745884] env[61594]: updated_port = self._update_port( [ 660.745884] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.745884] env[61594]: _ensure_no_port_binding_failure(port) [ 660.745884] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.745884] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 660.746670] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. [ 660.746670] env[61594]: Removing descriptor: 20 [ 660.746670] env[61594]: ERROR nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. 
[ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Traceback (most recent call last): [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] yield resources [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.driver.spawn(context, instance, image_meta, [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self._vmops.spawn(context, instance, image_meta, injected_files, [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 660.746670] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] vm_ref = self.build_virtual_machine(instance, [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] vif_infos = vmwarevif.get_vif_info(self._session, [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] for vif in network_info: [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self._sync_wrapper(fn, *args, **kwargs) [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.wait() [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self[:] = self._gt.wait() [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self._exit_event.wait() [ 660.747071] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 660.747408] env[61594]: ERROR 
nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] result = hub.switch() [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self.greenlet.switch() [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] result = function(*args, **kwargs) [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return func(*args, **kwargs) [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise e [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] nwinfo = self.network_api.allocate_for_instance( [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 660.747408] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] created_port_ids = self._update_ports_for_instance( [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] with excutils.save_and_reraise_exception(): [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.force_reraise() [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise self.value [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] updated_port = self._update_port( [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 660.747752] 
env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] _ensure_no_port_binding_failure(port) [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 660.747752] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise exception.PortBindingFailed(port_id=port['id']) [ 660.748106] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. [ 660.748106] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] [ 660.748106] env[61594]: INFO nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Terminating instance [ 660.753021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 660.753021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquired lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.753021] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 660.753021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 0251b7aa02ef403d93ee15ccc8562965 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 660.759993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0251b7aa02ef403d93ee15ccc8562965 [ 660.796764] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.853296] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.101390] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.101390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 083e85db44f0428592961e259dcf9bd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.109502] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 083e85db44f0428592961e259dcf9bd1 [ 661.110342] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.110888] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 661.111214] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 661.111824] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-155bea4e-20f9-4a9d-8550-95f4e8e666c4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.120919] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e216a4-f8ff-4f32-a90b-1e05914a1a05 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.147020] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c0b7aef6-0633-420d-906b-818bcc54072e could not be found. 
[ 661.147020] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 661.147020] env[61594]: INFO nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Took 0.03 seconds to destroy the instance on the hypervisor. [ 661.147020] env[61594]: DEBUG oslo.service.loopingcall [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.147020] env[61594]: DEBUG nova.compute.manager [-] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 661.147274] env[61594]: DEBUG nova.network.neutron [-] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.415151] env[61594]: DEBUG nova.network.neutron [-] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.416292] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3a1217850cce4fc18a7ba505d4cfab7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.429473] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a1217850cce4fc18a7ba505d4cfab7c [ 661.430147] env[61594]: DEBUG nova.network.neutron [-] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.431652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a1bddc6ac83b499e8a12394ce5cfc5f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.452379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1bddc6ac83b499e8a12394ce5cfc5f9 [ 661.452379] env[61594]: INFO nova.compute.manager [-] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Took 0.31 seconds to deallocate network for instance. 
[ 661.453461] env[61594]: DEBUG nova.compute.claims [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 661.453809] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.454539] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.457104] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6351bcaa7d8e4dc493bc7a158aec8fad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.520997] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6351bcaa7d8e4dc493bc7a158aec8fad [ 661.667129] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecb6a48-e943-4acd-a7e3-cd9b3e31bfec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.677191] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2804939-a4c1-45c6-a72d-1fd1e802d257 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.714460] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a5bb083-c084-41e3-8853-17522b681710 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.718973] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f7dcdd-438e-4b39-b0e1-4179caeb9701 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.734941] env[61594]: DEBUG nova.compute.provider_tree [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.735484] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 598330440a4a47959564fc215d38bc8b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.746624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
598330440a4a47959564fc215d38bc8b [ 661.747796] env[61594]: DEBUG nova.scheduler.client.report [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 661.750899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 743be1a1b67e4a5db2b821b7697aa2a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.772278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 743be1a1b67e4a5db2b821b7697aa2a1 [ 661.773428] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.319s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.774015] env[61594]: ERROR nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. 
[ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Traceback (most recent call last): [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.driver.spawn(context, instance, image_meta, [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] vm_ref = self.build_virtual_machine(instance, [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.774015] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] for vif in network_info: [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self._sync_wrapper(fn, *args, **kwargs) [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.wait() [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self[:] = self._gt.wait() [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self._exit_event.wait() [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] result = hub.switch() [ 661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
661.774391] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return self.greenlet.switch() [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] result = function(*args, **kwargs) [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] return func(*args, **kwargs) [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise e [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] nwinfo = self.network_api.allocate_for_instance( [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] created_port_ids = self._update_ports_for_instance( [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] with excutils.save_and_reraise_exception(): [ 661.774823] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] self.force_reraise() [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise self.value [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] updated_port = self._update_port( [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] _ensure_no_port_binding_failure(port) [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] raise exception.PortBindingFailed(port_id=port['id']) [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] nova.exception.PortBindingFailed: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. [ 661.775240] env[61594]: ERROR nova.compute.manager [instance: c0b7aef6-0633-420d-906b-818bcc54072e] [ 661.775571] env[61594]: DEBUG nova.compute.utils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.776787] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Build of instance c0b7aef6-0633-420d-906b-818bcc54072e was re-scheduled: Binding failed for port 07c16f5c-e943-48a0-81a3-e7f9ee9ca363, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 661.777224] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 661.777454] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.777658] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.777831] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 661.778245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 74f2f9a5b80e41f8ba579df238e1a7e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.782249] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 
5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.782249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 5bb7cef88bc8455f9ca8b3e9b1be3413 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.786604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74f2f9a5b80e41f8ba579df238e1a7e3 [ 661.791377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bb7cef88bc8455f9ca8b3e9b1be3413 [ 661.791933] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Releasing lock "refresh_cache-5aa53314-d177-4e8a-a2f5-ae0db9d30a66" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.792232] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 661.792338] env[61594]: DEBUG nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 661.792502] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.827874] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.828482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg a39bd75dfa824b01bbac075ce9bd69a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.839300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a39bd75dfa824b01bbac075ce9bd69a7 [ 661.839955] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Releasing lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 661.840367] env[61594]: DEBUG nova.compute.manager [None 
req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 661.840557] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 661.841094] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c330fa3-21cc-4387-81bf-f9ffc41220e6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.850802] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b366051f-76bb-4db1-b6ba-edbaf65854ff {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.873534] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b908bdf3-a20f-4156-9ece-8dc038c9f749 could not be found. [ 661.873766] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 661.873975] env[61594]: INFO nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Took 0.03 seconds to destroy the instance on the hypervisor. [ 661.874287] env[61594]: DEBUG oslo.service.loopingcall [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 661.874513] env[61594]: DEBUG nova.compute.manager [-] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 661.874604] env[61594]: DEBUG nova.network.neutron [-] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.878161] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.900776] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.902409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg beed60f139a14c8eae2ffd7c457c48fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.910282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg beed60f139a14c8eae2ffd7c457c48fe [ 661.910282] env[61594]: DEBUG nova.network.neutron [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.910645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 76adf585039e4fdda795cc01ec7f5ad0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.922136] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76adf585039e4fdda795cc01ec7f5ad0 [ 661.922232] env[61594]: INFO nova.compute.manager [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] [instance: 5aa53314-d177-4e8a-a2f5-ae0db9d30a66] Took 0.13 seconds to deallocate network for instance. [ 661.924159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 7ce0222aa37947678b9c4b41cae1dbe4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.968159] env[61594]: DEBUG nova.network.neutron [-] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 661.969831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cb75af58c1e14b84a58388338a02ef60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.984431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb75af58c1e14b84a58388338a02ef60 [ 661.984431] env[61594]: DEBUG nova.network.neutron [-] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.984431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e6fe17ff576a406b8ecad33754fc835e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.992592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6fe17ff576a406b8ecad33754fc835e [ 661.993150] env[61594]: INFO nova.compute.manager [-] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Took 0.12 seconds to deallocate network for instance. [ 661.995230] env[61594]: DEBUG nova.compute.claims [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 661.995421] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.995626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.997537] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 9836592a5276410da1b7d7677b1dd359 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 661.999313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ce0222aa37947678b9c4b41cae1dbe4 [ 662.001904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg c327b02edf3844899a904a5c83ef1455 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.007021] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Successfully created port: 48426ae3-8620-4cde-b703-aa3f7b25ab22 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 662.043977] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for 
msg 9836592a5276410da1b7d7677b1dd359 [ 662.044593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c327b02edf3844899a904a5c83ef1455 [ 662.085182] env[61594]: INFO nova.scheduler.client.report [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Deleted allocations for instance 5aa53314-d177-4e8a-a2f5-ae0db9d30a66 [ 662.092902] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Expecting reply to msg 229f6e63bfde490d8d56d7ad6f2d85fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.120120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 229f6e63bfde490d8d56d7ad6f2d85fa [ 662.121367] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8b6d1d73-2883-43ce-afed-5f004f6093e2 tempest-ServerActionsTestJSON-1372202196 tempest-ServerActionsTestJSON-1372202196-project-member] Lock "5aa53314-d177-4e8a-a2f5-ae0db9d30a66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.621s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.224041] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b08aaa-b1f8-4ab4-af38-f9f6245db37c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.241197] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b725abc6-8bb8-450d-a8df-f15e69165cc1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.285870] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a8ed70-a015-4bee-b010-d2bdd00566cd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.295981] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcf9208-f5b0-4161-9601-cec8f5a5e2c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.310072] env[61594]: DEBUG nova.compute.provider_tree [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.310663] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 937017b660b64e6699d75a01595a3c56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.320828] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 937017b660b64e6699d75a01595a3c56 [ 662.322383] env[61594]: DEBUG nova.scheduler.client.report [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.326068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg f5783c4289db43c6b588e38772cbc0a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.352436] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5783c4289db43c6b588e38772cbc0a0 [ 662.353071] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.357s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.354112] env[61594]: ERROR nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. 
[ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Traceback (most recent call last): [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.driver.spawn(context, instance, image_meta, [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self._vmops.spawn(context, instance, image_meta, injected_files, [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] vm_ref = self.build_virtual_machine(instance, [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] vif_infos = vmwarevif.get_vif_info(self._session, [ 662.354112] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] for vif in network_info: [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self._sync_wrapper(fn, *args, **kwargs) [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.wait() [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self[:] = self._gt.wait() [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self._exit_event.wait() [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] result = hub.switch() [ 662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
662.354518] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return self.greenlet.switch() [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] result = function(*args, **kwargs) [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] return func(*args, **kwargs) [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise e [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] nwinfo = self.network_api.allocate_for_instance( [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] created_port_ids = self._update_ports_for_instance( [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] with excutils.save_and_reraise_exception(): [ 662.354857] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] self.force_reraise() [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise self.value [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] updated_port = self._update_port( [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] _ensure_no_port_binding_failure(port) [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] raise exception.PortBindingFailed(port_id=port['id']) [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] nova.exception.PortBindingFailed: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. [ 662.355204] env[61594]: ERROR nova.compute.manager [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] [ 662.355550] env[61594]: DEBUG nova.compute.utils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 662.358540] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Build of instance b908bdf3-a20f-4156-9ece-8dc038c9f749 was re-scheduled: Binding failed for port 2caa9152-e1cc-4d04-9e73-221b56b1f580, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 662.358540] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 662.360066] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 662.360251] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquired lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.360650] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.361138] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg c2df76b10955445dac4133ae9f305ac6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.369301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2df76b10955445dac4133ae9f305ac6 [ 662.432055] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 
tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquiring lock "88c75c70-0ed0-4f19-bdb5-60bb07307b26" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.432055] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "88c75c70-0ed0-4f19-bdb5-60bb07307b26" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.434254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg e04335b9402d4f11aa6f1eff619dfd6a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.450279] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e04335b9402d4f11aa6f1eff619dfd6a [ 662.451209] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 662.453020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg efb8134ad8864d96b4d3ca494b2fcc36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.471481] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.503033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efb8134ad8864d96b4d3ca494b2fcc36 [ 662.524516] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 662.525076] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 662.528736] env[61594]: INFO nova.compute.claims [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.528736] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 6b8c8ee88a4c4b8c8b8e4d5892cce0a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.574181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b8c8ee88a4c4b8c8b8e4d5892cce0a5 [ 662.576493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 0ab37979263947ae8043337e93e95da8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.592980] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ab37979263947ae8043337e93e95da8 [ 662.735063] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8604d2-b9e0-46dd-aea5-ed11c9f53db9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.743718] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170e32b7-22de-429d-a702-16ebbe880e32 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.779806] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.780375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 
4cb5f30742b94edb90a572e87fae6a0e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.784386] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38f9f8c-8007-42f3-8673-9adea0d14093 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.790072] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e371a7f-84ca-4bad-a3d3-e4e654e50761 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.794293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb5f30742b94edb90a572e87fae6a0e [ 662.794895] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-c0b7aef6-0633-420d-906b-818bcc54072e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.795123] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 662.795302] env[61594]: DEBUG nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 662.795516] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 662.808279] env[61594]: DEBUG nova.compute.provider_tree [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.808796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg f0435aae73b94ffa9e5e2a9891d1a84a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.819890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0435aae73b94ffa9e5e2a9891d1a84a [ 662.821426] env[61594]: DEBUG nova.scheduler.client.report [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.824303] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg ce89f9f592b54f37869f2a7f1d12f5f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.841477] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce89f9f592b54f37869f2a7f1d12f5f2 [ 662.842112] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 662.842625] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 662.844866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg e0e5004fbacc4295ae465b3c49f2b809 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.859810] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.860271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg f0630b167c4c40da9ecba9f3623055d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.872208] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0630b167c4c40da9ecba9f3623055d8 [ 662.872679] env[61594]: DEBUG nova.network.neutron [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.873168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 14fc1297fc144804b32020e2eb5e12cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.886993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14fc1297fc144804b32020e2eb5e12cc [ 662.887774] env[61594]: INFO nova.compute.manager [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: c0b7aef6-0633-420d-906b-818bcc54072e] Took 0.09 seconds to deallocate network for instance. [ 662.890279] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 34c6018f04714052b3e3b84aa98a00b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.893802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0e5004fbacc4295ae465b3c49f2b809 [ 662.895027] env[61594]: DEBUG nova.compute.utils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.895862] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg e5c701f460a448ee8d3daa38bb2898f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.900048] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 662.900048] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 662.908552] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5c701f460a448ee8d3daa38bb2898f1 [ 662.909154] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 662.911113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg baafff05c0b841719384fabba24c8dce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.956198] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34c6018f04714052b3e3b84aa98a00b1 [ 662.959624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 611e673c3de24bf9a65e4aff48631a86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.964020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg baafff05c0b841719384fabba24c8dce [ 662.964020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg b398f8c16ff24a2db2b3279368128f58 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 662.998170] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b398f8c16ff24a2db2b3279368128f58 [ 662.998742] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 611e673c3de24bf9a65e4aff48631a86 [ 662.999877] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 663.025469] env[61594]: INFO nova.scheduler.client.report [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Deleted allocations for instance c0b7aef6-0633-420d-906b-818bcc54072e [ 663.032919] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 663.033176] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 663.033336] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 663.033521] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 663.033668] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 663.033815] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 663.034041] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 663.034208] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 663.034386] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 663.035020] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 663.035231] env[61594]: DEBUG nova.virt.hardware [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 663.036132] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11df0ea-3843-466b-bc74-0f7912c092cb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.039184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 47d667d42b4e418d9b33cdac89b998c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.045550] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4011945f-a80c-4e46-a911-aacfb226d1de {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.062217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 47d667d42b4e418d9b33cdac89b998c5 [ 663.063293] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60b7bf0a-a10e-4ad3-8b7f-31d0ceba0e55 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "c0b7aef6-0633-420d-906b-818bcc54072e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.336s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 663.212033] env[61594]: DEBUG nova.policy [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32cf35f7cdf54e61ba76eca15c5b6045', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ada332ead1f5480191a0bd8bf9496847', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 663.440910] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.442542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg e35def5b91d140f9a95ac099399cfe7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.463890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35def5b91d140f9a95ac099399cfe7c [ 663.465097] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Releasing lock "refresh_cache-b908bdf3-a20f-4156-9ece-8dc038c9f749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 663.465266] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 663.465666] env[61594]: DEBUG nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 663.465771] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 663.580623] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.581335] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg bc45934e8d6c4e368fc1de2cc3c3b8fd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.590785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc45934e8d6c4e368fc1de2cc3c3b8fd [ 663.591326] env[61594]: DEBUG nova.network.neutron [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.591847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 9f40966203714fd8beac4543e94d9e93 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.607271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f40966203714fd8beac4543e94d9e93 [ 663.607271] env[61594]: INFO nova.compute.manager [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: b908bdf3-a20f-4156-9ece-8dc038c9f749] Took 0.14 seconds to deallocate network for instance. [ 663.607271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 7d70cffce4bc47ae89aab0ce34158859 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.659848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d70cffce4bc47ae89aab0ce34158859 [ 663.662484] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 2f31439fb00544f788fee76c9793e399 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.703333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f31439fb00544f788fee76c9793e399 [ 663.742709] env[61594]: INFO nova.scheduler.client.report [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Deleted allocations for instance b908bdf3-a20f-4156-9ece-8dc038c9f749 [ 663.749943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 86530e7f728e4562bb162514c40b4837 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 663.767971] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86530e7f728e4562bb162514c40b4837 [ 663.767971] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5836cce4-2eb7-4522-95a9-9d5e8a10c623 tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "b908bdf3-a20f-4156-9ece-8dc038c9f749" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.147s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.811626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquiring lock "e9f942ac-414f-4d15-9b7a-c26e6612a787" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.811626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "e9f942ac-414f-4d15-9b7a-c26e6612a787" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.811626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg aebf7a29a1b64d56ab544ee9ba6326b9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 664.823699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aebf7a29a1b64d56ab544ee9ba6326b9 [ 664.824261] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 664.825935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg b7f70159ca55446ab923f0defb76c01e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 664.891785] env[61594]: WARNING oslo_vmware.rw_handles [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 664.891785] env[61594]: ERROR oslo_vmware.rw_handles [ 664.891785] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 664.895012] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 664.895012] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Copying Virtual Disk [datastore1] vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/4b9b2c6f-f7d8-4806-afe9-5cc8eedd64e7/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 664.895012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7f70159ca55446ab923f0defb76c01e [ 664.895012] env[61594]: DEBUG oslo_vmware.service [-] 
Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-412266d0-50f6-41d8-a4c5-dbc100ba724e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.911121] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for the task: (returnval){ [ 664.911121] env[61594]: value = "task-1291380" [ 664.911121] env[61594]: _type = "Task" [ 664.911121] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.911121] env[61594]: ERROR nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. [ 664.911121] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 664.911121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 664.911121] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 664.911121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.911121] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 664.911121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.911121] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 664.911121] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.911121] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 664.911121] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.911121] env[61594]: ERROR nova.compute.manager raise self.value [ 664.911597] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.911597] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 664.911597] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.911597] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 664.911597] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.911597] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 664.911597] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. 
[ 664.911597] env[61594]: ERROR nova.compute.manager [ 664.911597] env[61594]: Traceback (most recent call last): [ 664.911597] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 664.911597] env[61594]: listener.cb(fileno) [ 664.911597] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 664.911597] env[61594]: result = function(*args, **kwargs) [ 664.911597] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 664.911597] env[61594]: return func(*args, **kwargs) [ 664.911597] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 664.911597] env[61594]: raise e [ 664.911597] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 664.911597] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 664.911597] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.911597] env[61594]: created_port_ids = self._update_ports_for_instance( [ 664.911597] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.911597] env[61594]: with excutils.save_and_reraise_exception(): [ 664.911597] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.911597] env[61594]: self.force_reraise() [ 664.911597] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.911597] env[61594]: raise self.value [ 664.912570] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.912570] env[61594]: updated_port = self._update_port( [ 664.912570] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.912570] env[61594]: _ensure_no_port_binding_failure(port) [ 664.912570] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.912570] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 664.912570] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. [ 664.912570] env[61594]: Removing descriptor: 21 [ 664.912570] env[61594]: ERROR nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. 
[ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Traceback (most recent call last): [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] yield resources [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.driver.spawn(context, instance, image_meta, [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 664.912570] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self._vmops.spawn(context, instance, image_meta, injected_files, [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] vm_ref = self.build_virtual_machine(instance, [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] vif_infos = vmwarevif.get_vif_info(self._session, [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] for vif in network_info: [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self._sync_wrapper(fn, *args, **kwargs) [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.wait() [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self[:] = self._gt.wait() [ 664.913063] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self._exit_event.wait() [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 664.913435] env[61594]: ERROR 
nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] result = hub.switch() [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self.greenlet.switch() [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] result = function(*args, **kwargs) [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return func(*args, **kwargs) [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise e [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 664.913435] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] nwinfo = self.network_api.allocate_for_instance( [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] created_port_ids = self._update_ports_for_instance( [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] with excutils.save_and_reraise_exception(): [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.force_reraise() [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise self.value [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] updated_port = self._update_port( [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 664.913792] 
env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] _ensure_no_port_binding_failure(port) [ 664.913792] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 664.916065] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise exception.PortBindingFailed(port_id=port['id']) [ 664.916065] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. [ 664.916065] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] [ 664.916065] env[61594]: INFO nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Terminating instance [ 664.920278] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 664.920472] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquired lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.921175] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 664.921175] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 6353ddf44afd426abfe8599a1316fa78 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 664.933414] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Task: {'id': task-1291380, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.934833] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.935070] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.936556] env[61594]: INFO nova.compute.claims [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 664.938118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 9fabb3898b1449e89ce46ac2596e526f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 664.943360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6353ddf44afd426abfe8599a1316fa78 [ 664.978270] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fabb3898b1449e89ce46ac2596e526f [ 664.981874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 8a8123c076284243abbfccb447330003 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 664.996173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a8123c076284243abbfccb447330003 [ 665.045753] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.146385] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5601d9-07f4-488d-9d10-d8648b5eb1b2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.159038] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efe03674-cf2d-4398-ac51-730691a76990 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.197249] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8061d5d4-ac84-454e-93ed-8ebe5799881f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.206905] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15685ca-b400-48ea-97ca-5bc6f142a4cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.221351] env[61594]: DEBUG nova.compute.provider_tree [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.221952] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 5d51b041d8b44f2aab95806680642cd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.233290] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d51b041d8b44f2aab95806680642cd1 [ 665.233696] env[61594]: DEBUG nova.scheduler.client.report [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 665.236076] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 7c052915249a4d8c8ffcf1788f3bfb68 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.253149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c052915249a4d8c8ffcf1788f3bfb68 [ 665.254064] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.254605] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 665.257353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 6ab65375baa0458ca5f4747dab2dabe7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.303149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ab65375baa0458ca5f4747dab2dabe7 [ 665.304624] env[61594]: DEBUG nova.compute.utils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 665.305227] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg d1f332ee2a4c46629784fa06c4cc84f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.306339] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 665.306906] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 665.321855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1f332ee2a4c46629784fa06c4cc84f4 [ 665.321855] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 665.323697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 4d6adadb3b3f4e2a8344bd699a4d006e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.378890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d6adadb3b3f4e2a8344bd699a4d006e [ 665.384139] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg ae905f0d578943919e06a4368918f3bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.422377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae905f0d578943919e06a4368918f3bd [ 665.422883] env[61594]: DEBUG oslo_vmware.exceptions [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 665.424196] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 665.426584] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 665.427483] env[61594]: ERROR nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.427483] env[61594]: Faults: ['InvalidArgument'] [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Traceback (most recent call last): [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] yield resources [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self.driver.spawn(context, instance, image_meta, [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in 
spawn [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self._fetch_image_if_missing(context, vi) [ 665.427483] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] image_cache(vi, tmp_image_ds_loc) [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] vm_util.copy_virtual_disk( [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] session._wait_for_task(vmdk_copy_task) [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return self.wait_for_task(task_ref) [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return evt.wait() [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] result = hub.switch() [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 665.427891] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return self.greenlet.switch() [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self.f(*self.args, **self.kw) [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] raise exceptions.translate_fault(task_info.error) [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 
04fd7039-c2c8-4b78-8c3d-37eb66fe2115] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Faults: ['InvalidArgument'] [ 665.428262] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] [ 665.428262] env[61594]: INFO nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Terminating instance [ 665.429863] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.431802] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.433053] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 665.433053] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquired lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.433053] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 665.433282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg cdae1a48f8e743b48219106b08774f72 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 665.434127] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-481308fa-918e-4f42-853c-89dbb952b338 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.437808] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Successfully created port: d5c3fc88-4154-47af-858d-9db76308a133 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.447497] env[61594]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.447497] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 665.448684] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdae1a48f8e743b48219106b08774f72 [ 665.451487] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36a495f3-a3ff-407a-9f78-253d8f75eb59 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.467488] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Waiting for the task: (returnval){ [ 665.467488] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]520eb096-1d36-16fb-19f6-30878709ad8e" [ 665.467488] env[61594]: _type = "Task" [ 665.467488] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.474773] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.475041] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.475219] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.475404] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 
tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.475583] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.475691] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.475888] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.476058] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 665.476224] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.476384] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.477214] env[61594]: DEBUG nova.virt.hardware [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.478663] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa400e6-3c81-47cc-904c-a2a8abcdad62 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.492920] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 665.492920] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 
tempest-ServerDiagnosticsV248Test-2019202710-project-member] Creating directory with path [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.492920] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ddfd46e-6039-415a-ae8b-24cbfd5da953 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.500142] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f707d2a3-d622-4eaa-8078-b22c6e34ff85 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.520019] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Created directory with path [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.520019] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Fetch image to [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 665.520019] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 665.520019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64addfc5-dfa4-4f43-b92f-f6cef7da63c5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.525640] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f343369-66c1-4336-83f3-18422435c2d2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.538059] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6084f069-c568-4cb5-8663-5e0e146358cb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.568490] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97b0ff0-5a38-4bae-b37b-cf0958c5fad9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.575052] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-012c0ce6-f0ee-48c8-ab79-43fa88b24c9e {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.605416] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 665.677639] env[61594]: DEBUG oslo_vmware.rw_handles [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 665.749880] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 665.755311] env[61594]: DEBUG oslo_vmware.rw_handles [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 665.755572] env[61594]: DEBUG oslo_vmware.rw_handles [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 665.952308] env[61594]: DEBUG nova.policy [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ac9d079b0e3244f6bf0871a4eea21b49', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '387cec2bf6194b85a24dec9167f9e2a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 665.984485] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.985797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 0db9cf14c79e4bd899397ce4ef81a239 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.002783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0db9cf14c79e4bd899397ce4ef81a239 [ 666.002783] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Releasing lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.003099] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 666.003829] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 666.005509] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-533515c3-2582-41ee-b41e-95157e363840 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.026810] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 666.026810] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-569b7a07-eba4-4983-9007-228bee7804cb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.056702] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 666.056802] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 666.057621] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Deleting the datastore file [datastore1] 04fd7039-c2c8-4b78-8c3d-37eb66fe2115 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.057621] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-63f49150-4bf9-4171-ab78-876fa758860b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.066899] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for the task: (returnval){ [ 666.066899] env[61594]: value = "task-1291382" [ 666.066899] env[61594]: _type = "Task" [ 666.066899] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.083505] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Task: {'id': task-1291382, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.109973] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.110557] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 55657161c3b244859fcd43a1f2db386f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.122354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55657161c3b244859fcd43a1f2db386f [ 666.123054] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Releasing lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 666.123470] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 666.123633] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 666.124897] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90f8aa87-ab2d-482e-9d09-3318906d8bc0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.130238] env[61594]: DEBUG nova.compute.manager [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Received event network-changed-ee5f9094-c2bd-4d83-b04f-0fb72ece6737 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 666.130238] env[61594]: DEBUG nova.compute.manager [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Refreshing instance network info cache due to event network-changed-ee5f9094-c2bd-4d83-b04f-0fb72ece6737. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 666.130238] env[61594]: DEBUG oslo_concurrency.lockutils [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] Acquiring lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.130238] env[61594]: DEBUG oslo_concurrency.lockutils [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] Acquired lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.130238] env[61594]: DEBUG nova.network.neutron [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Refreshing network info cache for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 666.130490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] Expecting reply to msg 37e2bef065a24c83a6496123e12b7c75 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.137996] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d84b43-c2b9-4277-9492-cb43f07d379d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.149928] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37e2bef065a24c83a6496123e12b7c75 [ 666.167940] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a07bc185-7541-4a42-8c83-76cdcf157167 could not be found. [ 666.167940] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 666.167940] env[61594]: INFO nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Took 0.04 seconds to destroy the instance on the hypervisor. [ 666.167940] env[61594]: DEBUG oslo.service.loopingcall [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.170301] env[61594]: DEBUG nova.compute.manager [-] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 666.170301] env[61594]: DEBUG nova.network.neutron [-] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 666.233729] env[61594]: DEBUG nova.network.neutron [-] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.234353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4c6602d5afef460d8093460775c33a76 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.235791] env[61594]: DEBUG nova.network.neutron [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.266909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c6602d5afef460d8093460775c33a76 [ 666.266909] env[61594]: DEBUG nova.network.neutron [-] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.272063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ddb8ceee67eb48dbae75bba998afd477 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.283944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddb8ceee67eb48dbae75bba998afd477 [ 666.284515] env[61594]: INFO nova.compute.manager [-] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Took 0.11 seconds to deallocate network for instance. 
[ 666.288209] env[61594]: DEBUG nova.compute.claims [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 666.288305] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.288755] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.290551] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg ad6351238ea443e0b4b6f25e30a98391 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.359687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad6351238ea443e0b4b6f25e30a98391 [ 666.523882] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4acdcbc5-daf4-4131-8fbd-70a15896c87d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.543729] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1798ccea-ed45-467d-b34f-2731b2d2e4ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.591728] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07251c5f-1457-4200-a5cb-51a3f217904a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.598041] env[61594]: DEBUG oslo_vmware.api [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Task: {'id': task-1291382, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037976} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.600069] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.600272] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 666.600441] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 666.600637] env[61594]: INFO nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Took 0.60 seconds to destroy the instance on the hypervisor. [ 666.600889] env[61594]: DEBUG oslo.service.loopingcall [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 666.601232] env[61594]: DEBUG nova.compute.manager [-] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 666.602885] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748ce22b-993d-414c-8e02-caafc31162c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.608697] env[61594]: DEBUG nova.compute.claims [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 666.608897] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.618505] env[61594]: DEBUG nova.compute.provider_tree [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.619567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg a05af3fd07ab47c2a96ee0c2447b1425 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.628968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a05af3fd07ab47c2a96ee0c2447b1425 [ 666.629488] env[61594]: DEBUG nova.scheduler.client.report [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 666.632719] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 73f24dc2df2d40ef89e02b8e8a989372 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.655822] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73f24dc2df2d40ef89e02b8e8a989372 [ 666.657524] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.368s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.657524] 
env[61594]: ERROR nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Traceback (most recent call last): [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.driver.spawn(context, instance, image_meta, [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.657524] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] vm_ref = self.build_virtual_machine(instance, [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] for vif in network_info: [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self._sync_wrapper(fn, *args, **kwargs) [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.wait() [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self[:] = self._gt.wait() [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self._exit_event.wait() [ 666.657942] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] result = hub.switch() [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return self.greenlet.switch() [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] result = function(*args, **kwargs) [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] return func(*args, **kwargs) [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise e [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] nwinfo = self.network_api.allocate_for_instance( [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.658323] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] created_port_ids = self._update_ports_for_instance( [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] with excutils.save_and_reraise_exception(): [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] self.force_reraise() [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise self.value [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] updated_port = self._update_port( [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: 
a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] _ensure_no_port_binding_failure(port) [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.658682] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] raise exception.PortBindingFailed(port_id=port['id']) [ 666.658993] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] nova.exception.PortBindingFailed: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. [ 666.658993] env[61594]: ERROR nova.compute.manager [instance: a07bc185-7541-4a42-8c83-76cdcf157167] [ 666.658993] env[61594]: DEBUG nova.compute.utils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.659924] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.050s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.662833] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg fa2837d0e9ea4a06a0c9d9d5cbc2d868 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.663072] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Build of instance a07bc185-7541-4a42-8c83-76cdcf157167 was re-scheduled: Binding failed for port ee5f9094-c2bd-4d83-b04f-0fb72ece6737, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 666.663523] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 666.663722] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquiring lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.704131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa2837d0e9ea4a06a0c9d9d5cbc2d868 [ 666.825071] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46ee45-a132-46aa-934c-d15981a9a218 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.832706] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689c98c3-9988-4a88-a3e0-23f0f93fd0ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.862327] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639bfc71-61a2-437f-9663-122ba934c353 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.869498] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329200c9-ca11-45c9-bc0b-75c289504fd9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.883823] env[61594]: DEBUG nova.compute.provider_tree [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.884194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 59e00c6f2ce14520a9c278d1a6aac670 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.896385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59e00c6f2ce14520a9c278d1a6aac670 [ 666.897534] env[61594]: DEBUG nova.scheduler.client.report [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 666.899664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 765fb198e7f345e38fcf6244a650d32b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.916963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 765fb198e7f345e38fcf6244a650d32b [ 666.917812] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.259s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.918336] env[61594]: ERROR nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 666.918336] env[61594]: Faults: ['InvalidArgument'] [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Traceback (most recent call last): [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self.driver.spawn(context, instance, image_meta, [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self._fetch_image_if_missing(context, vi) [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] image_cache(vi, tmp_image_ds_loc) [ 666.918336] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] vm_util.copy_virtual_disk( [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] session._wait_for_task(vmdk_copy_task) [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 
04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return self.wait_for_task(task_ref) [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return evt.wait() [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] result = hub.switch() [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] return self.greenlet.switch() [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 666.919374] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] self.f(*self.args, **self.kw) [ 666.920607] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 666.920607] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] raise exceptions.translate_fault(task_info.error) [ 666.920607] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 666.920607] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Faults: ['InvalidArgument'] [ 666.920607] env[61594]: ERROR nova.compute.manager [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] [ 666.920607] env[61594]: DEBUG nova.compute.utils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.920607] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Build of instance 04fd7039-c2c8-4b78-8c3d-37eb66fe2115 was re-scheduled: A specified parameter was not correct: fileType [ 666.920607] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 666.920979] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks 
/opt/stack/nova/nova/compute/manager.py:2997}} [ 666.921112] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquiring lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.921260] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Acquired lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.921419] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.921810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 42711d2f6e3d489ea2410b2520ab4d04 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 666.929432] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42711d2f6e3d489ea2410b2520ab4d04 [ 667.184384] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.290686] env[61594]: DEBUG nova.network.neutron [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.290686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] Expecting reply to msg 98e4be2d165142e7b08c2b2bf5b4cbfd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.302296] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98e4be2d165142e7b08c2b2bf5b4cbfd [ 667.305526] env[61594]: DEBUG oslo_concurrency.lockutils [req-2d7e08d4-8180-4df5-90a0-01bcc0e65660 req-c0ed6143-4ba4-4ff3-b0af-5b309eaa99f4 service nova] Releasing lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.305526] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Acquired lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.305526] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.305526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg b85561f9c39949fb98151813a40d4030 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.315079] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b85561f9c39949fb98151813a40d4030 [ 667.403016] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.557816] env[61594]: DEBUG nova.network.neutron [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.558536] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 7dec248a3a2e4820bb59ca399c7e908a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.573525] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dec248a3a2e4820bb59ca399c7e908a [ 667.574365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Releasing lock "refresh_cache-04fd7039-c2c8-4b78-8c3d-37eb66fe2115" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.576216] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 667.576216] env[61594]: DEBUG nova.compute.manager [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] [instance: 04fd7039-c2c8-4b78-8c3d-37eb66fe2115] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 667.577101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg d49dfffbe1a6488e9e97b1a3dc6c8005 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.627568] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d49dfffbe1a6488e9e97b1a3dc6c8005 [ 667.634953] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 23bdb7f15a0d43068846a6a71b3e8aec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.672213] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23bdb7f15a0d43068846a6a71b3e8aec [ 667.700991] env[61594]: INFO nova.scheduler.client.report [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Deleted allocations for instance 04fd7039-c2c8-4b78-8c3d-37eb66fe2115 [ 667.709519] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Expecting reply to msg 379629181e7e4568b22d9a0ebb8bf132 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 667.727874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379629181e7e4568b22d9a0ebb8bf132 [ 667.728975] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d5b2c684-c0d4-41f5-b9a1-c187a4e2c597 tempest-ServersAdmin275Test-1948904895 tempest-ServersAdmin275Test-1948904895-project-member] Lock "04fd7039-c2c8-4b78-8c3d-37eb66fe2115" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.943s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.072993] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.072993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg f8310e821cf2429687f8c7414a265935 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.086740] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8310e821cf2429687f8c7414a265935 [ 668.086740] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Releasing lock "refresh_cache-a07bc185-7541-4a42-8c83-76cdcf157167" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.086740] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 
tempest-ServersAdminTestJSON-1599288174-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 668.086740] env[61594]: DEBUG nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 668.086740] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 668.386846] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.387496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 1c068b3803334a8cb4d2876971ce6af5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.400635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c068b3803334a8cb4d2876971ce6af5 [ 668.400635] env[61594]: DEBUG nova.network.neutron [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.400635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg ecc92847f3da46cabe933ac93178f6a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.410279] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecc92847f3da46cabe933ac93178f6a1 [ 668.413067] env[61594]: INFO nova.compute.manager [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] [instance: a07bc185-7541-4a42-8c83-76cdcf157167] Took 0.33 seconds to deallocate network for instance. 
[ 668.413067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg cd16505f632a4ef1a25cc9824430751b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.461152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd16505f632a4ef1a25cc9824430751b [ 668.462845] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg 3cbf1bf15052459ca8d616f800bfcc00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.534430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3cbf1bf15052459ca8d616f800bfcc00 [ 668.560189] env[61594]: INFO nova.scheduler.client.report [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Deleted allocations for instance a07bc185-7541-4a42-8c83-76cdcf157167 [ 668.568040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Expecting reply to msg e8bdac95289241dd81743a1b2a06634b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 668.589221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8bdac95289241dd81743a1b2a06634b [ 668.591116] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf869085-50f6-4df8-9fc1-eae82e39c36d tempest-ServersAdminTestJSON-1599288174 tempest-ServersAdminTestJSON-1599288174-project-member] Lock "a07bc185-7541-4a42-8c83-76cdcf157167" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.002s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.790739] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Successfully created port: dc6c2510-4dcc-4d49-ad6f-a8b612cfa398 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 669.046483] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "410d3c8b-9be0-4863-b121-c9acffae69e4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.046606] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "410d3c8b-9be0-4863-b121-c9acffae69e4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.047115] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d 
tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 6bcc1f8923e34a30bad0daa4cf7df186 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.060487] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bcc1f8923e34a30bad0daa4cf7df186 [ 669.061038] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 669.062728] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 6cb67183a8274abdb4a315ca818d155c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.117953] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cb67183a8274abdb4a315ca818d155c [ 669.138801] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.139073] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 669.143770] env[61594]: INFO nova.compute.claims [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 669.143770] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 0be4fe00c147495db56cdb57d66f2ae4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.178935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0be4fe00c147495db56cdb57d66f2ae4 [ 669.181455] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 198d0fc567274def9a7f73c25e1bba39 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.190181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 198d0fc567274def9a7f73c25e1bba39 [ 669.329258] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2191ef51-f222-4f75-9129-04ab721506c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.341291] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f20000e5-71b6-4bfa-a74e-b31f0f0ec303 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.377066] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a2839d-73bd-4117-80c8-eb803e60a522 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.386289] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c9ebc8-6bb6-462e-9619-04f809af8644 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.400515] env[61594]: DEBUG nova.compute.provider_tree [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.401634] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 81e5a6f09be144e8890823d654945166 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.411269] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81e5a6f09be144e8890823d654945166 [ 669.412368] env[61594]: DEBUG nova.scheduler.client.report [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 669.414718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 724f5eacb30b4d9d80d2637797916ff0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.430143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 724f5eacb30b4d9d80d2637797916ff0 [ 669.430501] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 669.431089] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 669.432783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg e330ced7179b4643a06b7b804c0f49c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.507505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e330ced7179b4643a06b7b804c0f49c9 [ 669.509026] env[61594]: DEBUG nova.compute.utils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 669.510092] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg f994552f91f14e7589624c64a0ad1396 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.510783] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 669.510966] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 669.527361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f994552f91f14e7589624c64a0ad1396 [ 669.528148] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 669.529253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 1363f7a6e5a240d58d661f742c911fe5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.566465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1363f7a6e5a240d58d661f742c911fe5 [ 669.568815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 67d2808b7c154c71aa2d904ca6525bda in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 669.613514] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67d2808b7c154c71aa2d904ca6525bda [ 669.614821] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 669.641012] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 669.641280] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 669.641437] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 669.641616] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 669.641761] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 669.641918] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 669.642152] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 669.642309] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 669.642472] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d 
tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 669.642633] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 669.642804] env[61594]: DEBUG nova.virt.hardware [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.646641] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20031f60-4d61-4e0e-9360-84b6dcaf5be2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.653139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4143159a-d9e5-4595-ab9e-4b96da43c648 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.020120] env[61594]: DEBUG nova.policy [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '39dd18ce542f42e08215016404ffe9dc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd3952a0eb9b246b3981a76df98b855f0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 670.824452] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquiring lock "996e9c89-dd9c-4c76-952d-9ff90dec38c1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.824785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "996e9c89-dd9c-4c76-952d-9ff90dec38c1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.825252] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 9acc5f8b817949709640abb85b5f5211 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 670.838992] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9acc5f8b817949709640abb85b5f5211 [ 670.839633] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 670.841334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 8e0f35b0f4ae45faaee800f7a333e391 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 670.893797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e0f35b0f4ae45faaee800f7a333e391 [ 670.914993] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.915136] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.916975] env[61594]: INFO nova.compute.claims [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.919091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 61d3dc348a054034940f92d9487e6fe5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 670.959813] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61d3dc348a054034940f92d9487e6fe5 [ 670.962459] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 5ffb91073f9c4e1088b8f599e6c1f28e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 670.975341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ffb91073f9c4e1088b8f599e6c1f28e [ 671.087110] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df840e8e-1b19-4296-af35-5a40940855ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.094392] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b22bb5-b8eb-4891-89c6-e13d1b23370a {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.131718] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a34a86-f180-4315-bfc3-4e6f70bb6b52 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.140295] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f447c463-b372-435b-91c5-c9f421f0bff2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.157833] env[61594]: DEBUG nova.compute.provider_tree [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 671.158879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 6c8ecea4de29419dba05c8b067aeeb0a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.169396] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c8ecea4de29419dba05c8b067aeeb0a [ 671.171430] env[61594]: DEBUG nova.scheduler.client.report [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 671.177415] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg b210292177d64a0aab07141eba7243b9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.190926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b210292177d64a0aab07141eba7243b9 [ 671.192126] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.277s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.193160] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 671.197663] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 7896675cedaa41ca805a4ccb8cf8e1d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.256499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7896675cedaa41ca805a4ccb8cf8e1d5 [ 671.256712] env[61594]: DEBUG nova.compute.utils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.257826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 4e5d953f3d0f4814902d8458ea3c2d7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.260902] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 671.260902] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 671.279851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e5d953f3d0f4814902d8458ea3c2d7f [ 671.281177] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 671.283541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg cdcf5242fbc340be94378e530dff44b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.339989] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdcf5242fbc340be94378e530dff44b4 [ 671.343818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 8ca85a8ee38248318277e8f106242c49 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.343818] env[61594]: ERROR nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. [ 671.343818] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 671.343818] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 671.343818] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 671.343818] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 671.343818] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 671.343818] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 671.343818] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 671.343818] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.343818] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 671.343818] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.343818] env[61594]: ERROR nova.compute.manager raise self.value [ 671.344272] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 671.344272] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 671.344272] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.344272] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 671.344272] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.344272] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 671.344272] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. 
[ 671.344272] env[61594]: ERROR nova.compute.manager [ 671.344272] env[61594]: Traceback (most recent call last): [ 671.344272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 671.344272] env[61594]: listener.cb(fileno) [ 671.344272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 671.344272] env[61594]: result = function(*args, **kwargs) [ 671.344272] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 671.344272] env[61594]: return func(*args, **kwargs) [ 671.344272] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 671.344272] env[61594]: raise e [ 671.344272] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 671.344272] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 671.344272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 671.344272] env[61594]: created_port_ids = self._update_ports_for_instance( [ 671.344272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 671.344272] env[61594]: with excutils.save_and_reraise_exception(): [ 671.344272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.344272] env[61594]: self.force_reraise() [ 671.344272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.344272] env[61594]: raise self.value [ 671.345160] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 671.345160] env[61594]: updated_port = self._update_port( [ 671.345160] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.345160] env[61594]: _ensure_no_port_binding_failure(port) [ 671.345160] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.345160] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 671.345160] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. [ 671.345160] env[61594]: Removing descriptor: 17 [ 671.345160] env[61594]: ERROR nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. 
[ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Traceback (most recent call last): [ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] yield resources [ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.driver.spawn(context, instance, image_meta, [ 671.345160] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] vm_ref = self.build_virtual_machine(instance, [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] vif_infos = vmwarevif.get_vif_info(self._session, [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] for vif in network_info: [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self._sync_wrapper(fn, *args, **kwargs) [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.wait() [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 671.345722] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self[:] = self._gt.wait() [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self._exit_event.wait() [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 671.346161] env[61594]: ERROR 
nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] result = hub.switch() [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self.greenlet.switch() [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] result = function(*args, **kwargs) [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return func(*args, **kwargs) [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise e [ 671.346161] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] nwinfo = self.network_api.allocate_for_instance( [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] created_port_ids = self._update_ports_for_instance( [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] with excutils.save_and_reraise_exception(): [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.force_reraise() [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise self.value [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] updated_port = self._update_port( [ 671.346545] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 671.346545] 
env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] _ensure_no_port_binding_failure(port) [ 671.349352] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 671.349352] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise exception.PortBindingFailed(port_id=port['id']) [ 671.349352] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. [ 671.349352] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] [ 671.349352] env[61594]: INFO nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Terminating instance [ 671.355161] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 671.355161] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquired lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.355161] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 671.355161] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 015205b41b4a4af38475e761e3316308 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 671.359837] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015205b41b4a4af38475e761e3316308 [ 671.406237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ca85a8ee38248318277e8f106242c49 [ 671.407290] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 671.441178] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 671.441592] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 671.441592] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 671.441971] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 671.442265] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 671.442372] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 671.442632] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 671.442912] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 671.443027] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 671.443490] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 671.443490] env[61594]: DEBUG nova.virt.hardware [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 671.444290] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed40047e-f2e8-440c-bdb8-7ef90942bbdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.452813] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef64f2c3-a7aa-473b-8cd3-ce9198fbfdd3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.511394] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 671.695820] env[61594]: DEBUG nova.policy [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c4ea738bdd9b4b08ad5bcd2b59402c13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab80091b23ff48bf803d575c6a8a55f9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 672.325510] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.326053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 4af9af7a0baa4cbdbbcffc49104cbc10 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.339781] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4af9af7a0baa4cbdbbcffc49104cbc10 [ 672.340943] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Releasing lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 672.340943] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 672.341191] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 672.342043] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7357cf7a-9110-4919-9533-7c1c33e12e28 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.353961] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6640beda-4acd-43c0-8bc6-741f82f975eb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.381053] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03828801-4d8a-47dc-957e-f1aa64b652da could not be found. [ 672.381434] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 672.385438] env[61594]: INFO nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Took 0.04 seconds to destroy the instance on the hypervisor. [ 672.385438] env[61594]: DEBUG oslo.service.loopingcall [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 672.385438] env[61594]: DEBUG nova.compute.manager [-] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 672.385438] env[61594]: DEBUG nova.network.neutron [-] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 672.478032] env[61594]: DEBUG nova.network.neutron [-] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.478032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4fe29ff6f0b946989e86f2e7eb9beb9b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.486281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fe29ff6f0b946989e86f2e7eb9beb9b [ 672.487713] env[61594]: DEBUG nova.network.neutron [-] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.487713] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5577d8442699425092e8739beaf75dd6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.500714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5577d8442699425092e8739beaf75dd6 [ 672.501375] env[61594]: INFO nova.compute.manager [-] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Took 0.12 seconds to deallocate network for instance. [ 672.504047] env[61594]: DEBUG nova.compute.claims [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 672.504234] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 672.504451] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 672.506386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 0be02f5bcd104852841f3c7a4eb900ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.582389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0be02f5bcd104852841f3c7a4eb900ed [ 672.671298] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Successfully created port: 087fd1ae-48bb-4dec-b57e-45601c385600 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 672.728666] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfb906b-b3a1-43a0-a193-e81c6921f97f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.740538] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-112bd173-f1f9-461c-8d82-10a576c256e4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.781711] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5a03f2-18d7-4bbe-95cf-86cb3c9a2007 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.790119] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09aa586f-a4a8-4b56-8725-a3ae6e962dd3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.805350] env[61594]: DEBUG nova.compute.provider_tree [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.805832] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 07ef6826e3d54e0188f576a139d09df4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.818293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07ef6826e3d54e0188f576a139d09df4 [ 672.819370] env[61594]: DEBUG nova.scheduler.client.report [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 672.821737] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 41cad2e1d0934389b6d72dd2dfe29f2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.838214] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41cad2e1d0934389b6d72dd2dfe29f2f [ 672.839045] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 672.839889] env[61594]: ERROR nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Traceback (most recent call last): [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.driver.spawn(context, instance, image_meta, [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] vm_ref = self.build_virtual_machine(instance, [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.839889] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] for vif in network_info: [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self._sync_wrapper(fn, *args, **kwargs) [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.wait() [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self[:] = self._gt.wait() [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self._exit_event.wait() [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] result = hub.switch() [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.840563] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return self.greenlet.switch() [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] result = function(*args, **kwargs) [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] return func(*args, **kwargs) [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise e [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] nwinfo = self.network_api.allocate_for_instance( [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] created_port_ids = self._update_ports_for_instance( [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] with excutils.save_and_reraise_exception(): [ 672.841104] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] self.force_reraise() [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise self.value [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] updated_port = self._update_port( [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] _ensure_no_port_binding_failure(port) [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 
03828801-4d8a-47dc-957e-f1aa64b652da] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] raise exception.PortBindingFailed(port_id=port['id']) [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] nova.exception.PortBindingFailed: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. [ 672.841613] env[61594]: ERROR nova.compute.manager [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] [ 672.842049] env[61594]: DEBUG nova.compute.utils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 672.844342] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Build of instance 03828801-4d8a-47dc-957e-f1aa64b652da was re-scheduled: Binding failed for port ec5f7e6a-c76a-4424-a98d-f10c2bdf13cc, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 672.844783] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 672.845124] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquiring lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.845210] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Acquired lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.845324] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.845739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 83941c13a33e4bc181c665d26efdb487 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.858214] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
83941c13a33e4bc181c665d26efdb487 [ 672.912318] env[61594]: ERROR nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. [ 672.912318] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 672.912318] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.912318] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.912318] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.912318] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.912318] env[61594]: ERROR nova.compute.manager raise self.value [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.912318] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 672.912318] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.912318] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 672.912805] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.912805] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 672.912805] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. 
[ 672.912805] env[61594]: ERROR nova.compute.manager [ 672.912805] env[61594]: Traceback (most recent call last): [ 672.912805] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 672.912805] env[61594]: listener.cb(fileno) [ 672.912805] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 672.912805] env[61594]: result = function(*args, **kwargs) [ 672.912805] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.912805] env[61594]: return func(*args, **kwargs) [ 672.912805] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 672.912805] env[61594]: raise e [ 672.912805] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 672.912805] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 672.912805] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.912805] env[61594]: created_port_ids = self._update_ports_for_instance( [ 672.912805] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.912805] env[61594]: with excutils.save_and_reraise_exception(): [ 672.912805] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.912805] env[61594]: self.force_reraise() [ 672.912805] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.912805] env[61594]: raise self.value [ 672.912805] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.912805] env[61594]: updated_port = self._update_port( [ 672.912805] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.912805] env[61594]: _ensure_no_port_binding_failure(port) [ 672.912805] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.912805] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 672.913698] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. [ 672.913698] env[61594]: Removing descriptor: 19 [ 672.913698] env[61594]: ERROR nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. 
[ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Traceback (most recent call last): [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] yield resources [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.driver.spawn(context, instance, image_meta, [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.913698] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] vm_ref = self.build_virtual_machine(instance, [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] for vif in network_info: [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self._sync_wrapper(fn, *args, **kwargs) [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.wait() [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self[:] = self._gt.wait() [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self._exit_event.wait() [ 672.914103] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 672.914548] env[61594]: ERROR 
nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] result = hub.switch() [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self.greenlet.switch() [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] result = function(*args, **kwargs) [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return func(*args, **kwargs) [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise e [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] nwinfo = self.network_api.allocate_for_instance( [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.914548] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] created_port_ids = self._update_ports_for_instance( [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] with excutils.save_and_reraise_exception(): [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.force_reraise() [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise self.value [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] updated_port = self._update_port( [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.914963] 
env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] _ensure_no_port_binding_failure(port) [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.914963] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise exception.PortBindingFailed(port_id=port['id']) [ 672.915345] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. [ 672.915345] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] [ 672.915345] env[61594]: INFO nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Terminating instance [ 672.917037] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquiring lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.917295] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquired lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.917698] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 672.919237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg ed7ef3cb6abe4f1185f025c58415c7bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.932440] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed7ef3cb6abe4f1185f025c58415c7bc [ 672.987834] env[61594]: DEBUG nova.compute.manager [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Received event network-changed-d5c3fc88-4154-47af-858d-9db76308a133 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 672.988051] env[61594]: DEBUG nova.compute.manager [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Refreshing instance network info cache due to event network-changed-d5c3fc88-4154-47af-858d-9db76308a133. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 672.988259] env[61594]: DEBUG oslo_concurrency.lockutils [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] Acquiring lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.988396] env[61594]: DEBUG oslo_concurrency.lockutils [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] Acquired lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.988551] env[61594]: DEBUG nova.network.neutron [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Refreshing network info cache for port d5c3fc88-4154-47af-858d-9db76308a133 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.988976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] Expecting reply to msg 189063a6001042b0a6162e656d07ccd4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 672.996763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 189063a6001042b0a6162e656d07ccd4 [ 673.017097] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.020937] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.023741] env[61594]: ERROR nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. 
[ 673.023741] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 673.023741] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 673.023741] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 673.023741] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.023741] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.023741] env[61594]: ERROR nova.compute.manager raise self.value [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 673.023741] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 673.023741] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.023741] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 673.024257] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.024257] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 673.024257] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. 
[ 673.024257] env[61594]: ERROR nova.compute.manager [ 673.024257] env[61594]: Traceback (most recent call last): [ 673.024257] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 673.024257] env[61594]: listener.cb(fileno) [ 673.024257] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 673.024257] env[61594]: result = function(*args, **kwargs) [ 673.024257] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 673.024257] env[61594]: return func(*args, **kwargs) [ 673.024257] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 673.024257] env[61594]: raise e [ 673.024257] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 673.024257] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 673.024257] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 673.024257] env[61594]: created_port_ids = self._update_ports_for_instance( [ 673.024257] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 673.024257] env[61594]: with excutils.save_and_reraise_exception(): [ 673.024257] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.024257] env[61594]: self.force_reraise() [ 673.024257] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.024257] env[61594]: raise self.value [ 673.024257] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 673.024257] env[61594]: updated_port = self._update_port( [ 673.024257] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.024257] env[61594]: _ensure_no_port_binding_failure(port) [ 673.024257] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.024257] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 673.025022] env[61594]: nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. [ 673.025022] env[61594]: Removing descriptor: 24 [ 673.025380] env[61594]: ERROR nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. 
[ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Traceback (most recent call last): [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] yield resources [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.driver.spawn(context, instance, image_meta, [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self._vmops.spawn(context, instance, image_meta, injected_files, [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] vm_ref = self.build_virtual_machine(instance, [ 673.025380] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] vif_infos = vmwarevif.get_vif_info(self._session, [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] for vif in network_info: [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self._sync_wrapper(fn, *args, **kwargs) [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.wait() [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self[:] = self._gt.wait() [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self._exit_event.wait() [ 673.025744] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 673.025744] env[61594]: ERROR 
nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] result = hub.switch() [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self.greenlet.switch() [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] result = function(*args, **kwargs) [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return func(*args, **kwargs) [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise e [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] nwinfo = self.network_api.allocate_for_instance( [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] created_port_ids = self._update_ports_for_instance( [ 673.026119] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] with excutils.save_and_reraise_exception(): [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.force_reraise() [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise self.value [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] updated_port = self._update_port( [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 673.026486] 
env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] _ensure_no_port_binding_failure(port) [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise exception.PortBindingFailed(port_id=port['id']) [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. [ 673.026486] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] [ 673.026842] env[61594]: INFO nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Terminating instance [ 673.027645] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquiring lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 673.134938] env[61594]: DEBUG nova.network.neutron [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.954780] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.955364] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 51b01d42a59146b5aea8dafc2fe084a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 673.957592] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.958088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 9c5e699b2dcd4db8835babdea95ff0ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 673.970517] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51b01d42a59146b5aea8dafc2fe084a7 [ 673.971820] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 
tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Releasing lock "refresh_cache-03828801-4d8a-47dc-957e-f1aa64b652da" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.971820] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 673.971820] env[61594]: DEBUG nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 673.971820] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 673.974070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c5e699b2dcd4db8835babdea95ff0ec [ 673.975041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Releasing lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.975041] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 673.975178] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 673.975703] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff090577-9490-4acd-9d50-b21ecbfd2c71 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.985816] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448a56d8-dfcd-4473-a3c4-1e0f9b9155ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.009987] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 467ba147-20b6-41eb-852f-4097cb45ba6b could not be found. [ 674.010852] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.010852] env[61594]: INFO nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 674.010852] env[61594]: DEBUG oslo.service.loopingcall [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.011012] env[61594]: DEBUG nova.compute.manager [-] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 674.011149] env[61594]: DEBUG nova.network.neutron [-] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.050396] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.051856] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg f94daf05959245909e25420f262455cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.062321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f94daf05959245909e25420f262455cd [ 674.063141] env[61594]: DEBUG nova.network.neutron [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.063718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg c1aaaee92bf84ded8016e1b1679fb84c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.086022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1aaaee92bf84ded8016e1b1679fb84c [ 674.086022] env[61594]: INFO nova.compute.manager [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] [instance: 03828801-4d8a-47dc-957e-f1aa64b652da] Took 0.11 seconds to deallocate network for instance. [ 674.086022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg c9e81d8aff61490c89d5d3e80c2c05e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.088111] env[61594]: DEBUG nova.network.neutron [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.088663] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] Expecting reply to msg c5365fd5da8e47ad85f98f0b6d6296a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.095115] env[61594]: DEBUG nova.network.neutron [-] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.095593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4a40bdb7c31c4e9c9a58342aeb223b24 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.099934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5365fd5da8e47ad85f98f0b6d6296a1 [ 674.100481] env[61594]: DEBUG oslo_concurrency.lockutils [req-1d7c6332-4109-4802-94dc-cea65a05ad34 req-5e7324d5-1935-4631-87f4-9913e318b500 service nova] Releasing lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.100887] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquired lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.101116] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 674.102143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 46f8b21929014200b452ea4edd33184b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.111177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a40bdb7c31c4e9c9a58342aeb223b24 [ 674.111177] env[61594]: DEBUG nova.network.neutron [-] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.111472] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 21d5f21d3e20475c9725a11f1d2fb6de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.112354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46f8b21929014200b452ea4edd33184b [ 674.132090] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21d5f21d3e20475c9725a11f1d2fb6de [ 674.135868] env[61594]: INFO nova.compute.manager [-] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Took 0.12 seconds to deallocate network for instance. 
[ 674.135868] env[61594]: DEBUG nova.compute.claims [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 674.135868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 674.135868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.137829] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 5dee93846bac4678b96c7433372de3ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.146563] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9e81d8aff61490c89d5d3e80c2c05e0 [ 674.149449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 7cbe6f03a4644cfda6b67b0f9ebe6797 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.193480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cbe6f03a4644cfda6b67b0f9ebe6797 [ 674.194595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dee93846bac4678b96c7433372de3ef [ 674.207146] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.240585] env[61594]: INFO nova.scheduler.client.report [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Deleted allocations for instance 03828801-4d8a-47dc-957e-f1aa64b652da [ 674.251613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Expecting reply to msg 38864883837449929d064944e8f3aea6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.278385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38864883837449929d064944e8f3aea6 [ 674.279061] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a9e6f812-044e-444b-951c-26c125076904 tempest-VolumesAdminNegativeTest-225605931 tempest-VolumesAdminNegativeTest-225605931-project-member] Lock "03828801-4d8a-47dc-957e-f1aa64b652da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.166s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.366350] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc27af19-3456-44ab-9a7c-33d74afbf24e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.380172] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c23d2d-efe0-4ded-b5e9-cd04eceeb7e5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.415585] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-957f0b9d-4690-4e0f-836a-ee495f6e6e6a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.423921] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94fffc28-7ae2-48e0-a1da-229ee83d2309 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.440513] env[61594]: DEBUG nova.compute.provider_tree [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.440988] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg cc0eb77f944d48faa218d78fe4d5791e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.449538] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc0eb77f944d48faa218d78fe4d5791e [ 674.451337] env[61594]: DEBUG nova.scheduler.client.report [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 674.453042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 2722e711c06347cda2de3dcb722477d1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.466575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2722e711c06347cda2de3dcb722477d1 [ 674.467600] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.332s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.468204] env[61594]: ERROR nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. 
[ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Traceback (most recent call last): [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.driver.spawn(context, instance, image_meta, [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] vm_ref = self.build_virtual_machine(instance, [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.468204] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] for vif in network_info: [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self._sync_wrapper(fn, *args, **kwargs) [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.wait() [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self[:] = self._gt.wait() [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self._exit_event.wait() [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] result = hub.switch() [ 674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
674.469113] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return self.greenlet.switch() [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] result = function(*args, **kwargs) [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] return func(*args, **kwargs) [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise e [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] nwinfo = self.network_api.allocate_for_instance( [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] created_port_ids = self._update_ports_for_instance( [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] with excutils.save_and_reraise_exception(): [ 674.469497] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] self.force_reraise() [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise self.value [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] updated_port = self._update_port( [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] _ensure_no_port_binding_failure(port) [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] raise exception.PortBindingFailed(port_id=port['id']) [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] nova.exception.PortBindingFailed: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. [ 674.469900] env[61594]: ERROR nova.compute.manager [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] [ 674.470251] env[61594]: DEBUG nova.compute.utils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.471342] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Build of instance 467ba147-20b6-41eb-852f-4097cb45ba6b was re-scheduled: Binding failed for port 48426ae3-8620-4cde-b703-aa3f7b25ab22, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 674.471761] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 674.472015] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquiring lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.472210] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Acquired lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.472374] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 674.472778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 078aab75fa9d412cb80c55393b507204 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.481104] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 078aab75fa9d412cb80c55393b507204 [ 674.566943] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.928523] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.928523] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg e6b937f7e2b4442b8dbf8b684c8e8133 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 674.944469] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6b937f7e2b4442b8dbf8b684c8e8133 [ 674.944469] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Releasing lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.944469] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 674.944469] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 674.944469] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7a2c3d8a-e753-4ab2-9a50-a0f381354de9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.955826] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c03a07d-a335-42be-bc10-633c5ada9688 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.984871] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88c75c70-0ed0-4f19-bdb5-60bb07307b26 could not be found. 
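[editor's note] The tracebacks above all end in the same check in nova/network/neutron.py: once Neutron returns the updated port, Nova inspects the binding result and raises PortBindingFailed when the VIF type comes back as 'binding_failed'. A minimal, self-contained sketch of that check follows; the exception class and constant are local stand-ins for the real nova.exception and Neutron constants, used only to illustrate the behaviour seen in this log.

    # Simplified sketch of the check behind the PortBindingFailed errors above.
    # The exception class and constant are stand-ins, not the real Nova ones.

    VIF_TYPE_BINDING_FAILED = 'binding_failed'


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                f"logs for more information.")
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reported that it could not bind the port."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])


    # Shape of a port dict that would trigger the error recorded above.
    failed_port = {'id': '48426ae3-8620-4cde-b703-aa3f7b25ab22',
                   'binding:vif_type': VIF_TYPE_BINDING_FAILED}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)
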
[ 674.986721] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 674.986721] env[61594]: INFO nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Took 0.04 seconds to destroy the instance on the hypervisor. [ 674.986721] env[61594]: DEBUG oslo.service.loopingcall [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.986721] env[61594]: DEBUG nova.compute.manager [-] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 674.986721] env[61594]: DEBUG nova.network.neutron [-] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 675.098108] env[61594]: DEBUG nova.network.neutron [-] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.101024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3552713ab85a4b94ab0114cd2009cd2c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.110604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3552713ab85a4b94ab0114cd2009cd2c [ 675.111298] env[61594]: DEBUG nova.network.neutron [-] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.111709] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a17db4d655134bcd99c31d21b2b13980 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.127226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a17db4d655134bcd99c31d21b2b13980 [ 675.127799] env[61594]: INFO nova.compute.manager [-] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Took 0.14 seconds to deallocate network for instance. 
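[editor's note] The destroy sequence above is best-effort: vmops looks the VM up by UUID, finds nothing on the vCenter backend, logs the "Instance does not exist on backend" warning, and still reports the instance as destroyed so that cleanup can continue with network deallocation and the claim abort. A rough sketch of that pattern, with illustrative stand-in names rather than the actual nova.virt.vmwareapi classes:

    # Rough sketch of the tolerant destroy path seen above: a missing backend
    # VM is logged as a warning and treated as already destroyed. Names are
    # illustrative stand-ins, not the actual nova.virt.vmwareapi API.
    import logging

    LOG = logging.getLogger(__name__)


    class InstanceNotFound(Exception):
        pass


    def destroy_instance(session, instance_uuid):
        try:
            vm_ref = session.find_vm_by_uuid(instance_uuid)
            session.power_off_and_delete(vm_ref)
        except InstanceNotFound:
            # Nothing to tear down on the hypervisor; proceed with the rest
            # of the cleanup (network deallocation, claim abort) anyway.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.debug("Instance destroyed")


    class _MissingVMSession:
        """Tiny stand-in session whose lookup always reports a missing VM."""
        def find_vm_by_uuid(self, uuid):
            raise InstanceNotFound(uuid)

        def power_off_and_delete(self, vm_ref):
            pass


    logging.basicConfig(level=logging.DEBUG)
    destroy_instance(_MissingVMSession(), '88c75c70-0ed0-4f19-bdb5-60bb07307b26')
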
[ 675.130230] env[61594]: DEBUG nova.compute.claims [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 675.130417] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.130628] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.132438] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 6693f732c26645f481c7612393fb2bd6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.192184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6693f732c26645f481c7612393fb2bd6 [ 675.196774] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Successfully created port: 6fc0ede2-587a-4953-acb8-f92ad2c564f4 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.268235] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Successfully created port: 0dc71c87-9174-4e0d-8e76-9a6fceccb007 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.330063] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04905b0-db1b-4d1e-b2df-827deae476a7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.339264] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df7bebf-ae93-4f57-9ae5-eaf8b27d26c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.372152] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2096b5b7-1414-46d5-96c5-1b8690d4a32e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.380372] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ad9a3f-d561-4da4-994b-ac0eefad100f {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.395016] env[61594]: DEBUG nova.compute.provider_tree [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.395573] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 11bd70ec2ff24d3d80ac347c9606eca5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.406292] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11bd70ec2ff24d3d80ac347c9606eca5 [ 675.407400] env[61594]: DEBUG nova.scheduler.client.report [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 675.409712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 537604e0010f428d9f9175a5b16d904f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.422773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 537604e0010f428d9f9175a5b16d904f [ 675.423790] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.424320] env[61594]: ERROR nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. 
[ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Traceback (most recent call last): [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.driver.spawn(context, instance, image_meta, [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self._vmops.spawn(context, instance, image_meta, injected_files, [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] vm_ref = self.build_virtual_machine(instance, [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] vif_infos = vmwarevif.get_vif_info(self._session, [ 675.424320] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] for vif in network_info: [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self._sync_wrapper(fn, *args, **kwargs) [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.wait() [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self[:] = self._gt.wait() [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self._exit_event.wait() [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] result = hub.switch() [ 675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
675.424738] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return self.greenlet.switch() [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] result = function(*args, **kwargs) [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] return func(*args, **kwargs) [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise e [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] nwinfo = self.network_api.allocate_for_instance( [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] created_port_ids = self._update_ports_for_instance( [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] with excutils.save_and_reraise_exception(): [ 675.425150] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] self.force_reraise() [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise self.value [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] updated_port = self._update_port( [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] _ensure_no_port_binding_failure(port) [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] raise exception.PortBindingFailed(port_id=port['id']) [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] nova.exception.PortBindingFailed: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. [ 675.425529] env[61594]: ERROR nova.compute.manager [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] [ 675.426149] env[61594]: DEBUG nova.compute.utils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 675.427124] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Build of instance 88c75c70-0ed0-4f19-bdb5-60bb07307b26 was re-scheduled: Binding failed for port d5c3fc88-4154-47af-858d-9db76308a133, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 675.427626] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 675.427865] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquiring lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.428032] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Acquired lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.428199] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 675.428610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg bff862b4712848d5a67d21e0f870a2a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.439899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
bff862b4712848d5a67d21e0f870a2a4 [ 675.561213] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.564091] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.564880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg b29de718efd7438a9a50779d96739357 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.574248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b29de718efd7438a9a50779d96739357 [ 675.574821] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Releasing lock "refresh_cache-467ba147-20b6-41eb-852f-4097cb45ba6b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 675.575061] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 675.575260] env[61594]: DEBUG nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 675.575424] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 675.643145] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.644574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 9938042a681c49faa585f9f3801a7de7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.657937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9938042a681c49faa585f9f3801a7de7 [ 675.659107] env[61594]: DEBUG nova.network.neutron [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.660017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 79d41fe8c9384c0f970b0c781d044c4c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.669291] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79d41fe8c9384c0f970b0c781d044c4c [ 675.670481] env[61594]: INFO nova.compute.manager [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] [instance: 467ba147-20b6-41eb-852f-4097cb45ba6b] Took 0.09 seconds to deallocate network for instance. [ 675.672646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg b209c5edb50c4fb6b1ad1d6404b28b1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.723777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b209c5edb50c4fb6b1ad1d6404b28b1a [ 675.725794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg 3fa224a698d749bab52db3f7fcd2009c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.771843] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fa224a698d749bab52db3f7fcd2009c [ 675.784640] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.784789] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.785283] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7133ce7803d044fcb7f900f77515826e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.801345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7133ce7803d044fcb7f900f77515826e [ 675.803416] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 675.811243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a587a22d037544c398e71e358a31e6fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.812313] env[61594]: INFO nova.scheduler.client.report [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Deleted allocations for instance 467ba147-20b6-41eb-852f-4097cb45ba6b [ 675.825018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Expecting reply to msg c40b553662a34b1880828be616131faa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.838887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c40b553662a34b1880828be616131faa [ 675.839433] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21eeb9c5-8c2e-4625-9e3f-771252dc284c tempest-ServersWithSpecificFlavorTestJSON-2109930540 tempest-ServersWithSpecificFlavorTestJSON-2109930540-project-member] Lock "467ba147-20b6-41eb-852f-4097cb45ba6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.105s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 675.865019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a587a22d037544c398e71e358a31e6fe [ 675.884805] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.884890] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 675.886741] env[61594]: INFO nova.compute.claims [None 
req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 675.888361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a26e50d1eecb4b04b4a2c00a97e6a019 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.932035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a26e50d1eecb4b04b4a2c00a97e6a019 [ 675.935019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 1adb144577a840f5960fa4305c4aca27 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 675.943890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1adb144577a840f5960fa4305c4aca27 [ 676.065561] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086a118e-98ba-4519-b865-53c496d7fab1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.074197] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01833bd0-d389-43c9-bfaf-fb5b69d4ba71 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.104825] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b9d00bb-14ac-490e-944c-8c7d178aa721 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.112641] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57fa84e-08ac-4b1f-8804-c59a820fb55c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.126398] env[61594]: DEBUG nova.compute.provider_tree [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.126907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 955a5b522a5b4feb82218e22584e2f13 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.134367] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 955a5b522a5b4feb82218e22584e2f13 [ 676.135340] env[61594]: DEBUG nova.scheduler.client.report [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 676.137695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 5fadbaa19fb446d78927cf4852928838 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.158268] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fadbaa19fb446d78927cf4852928838 [ 676.159095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.274s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.159578] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 676.161368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 97cf17d24bea435f8ede9d88e63b7af2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.193802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97cf17d24bea435f8ede9d88e63b7af2 [ 676.194168] env[61594]: DEBUG nova.compute.utils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 676.194777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg e3e508a38e1b4c569a5457ca969ca1e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.198101] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 676.198101] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 676.205359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e3e508a38e1b4c569a5457ca969ca1e4 [ 676.205914] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 676.207555] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 51c286c778614730a84d11d6751a2e98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.255146] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51c286c778614730a84d11d6751a2e98 [ 676.257992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 5e0d878a9dc54d0781fd37f566c78ef1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.303319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e0d878a9dc54d0781fd37f566c78ef1 [ 676.304894] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 676.349171] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 676.349473] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 676.349651] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 676.349859] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 676.350086] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 676.350265] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 676.350500] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 676.350723] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 676.350858] env[61594]: DEBUG 
nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 676.351061] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 676.351258] env[61594]: DEBUG nova.virt.hardware [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.352194] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b59d91-9e4b-414c-b296-a1adcf01b753 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.362696] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbd4e8a-7218-494a-a275-1a1f86334379 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.524923] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.524923] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg f5e4030b748f41c0b8a03f4a5d392c0d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.548053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5e4030b748f41c0b8a03f4a5d392c0d [ 676.548795] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Releasing lock "refresh_cache-88c75c70-0ed0-4f19-bdb5-60bb07307b26" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.549028] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 676.549211] env[61594]: DEBUG nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 676.549420] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.577871] env[61594]: DEBUG nova.policy [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee5a21ff43314c1a857f6958056173f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afc5e909ec5c4dd983ece5aa3236910f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 676.651148] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.651764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg a7fde41429604e7b8d1a5e4000966af9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.660162] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7fde41429604e7b8d1a5e4000966af9 [ 676.660646] env[61594]: DEBUG nova.network.neutron [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.664015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg 6a14e77dce30431c962110f5cb07eb17 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.669795] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a14e77dce30431c962110f5cb07eb17 [ 676.670657] env[61594]: INFO nova.compute.manager [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] [instance: 88c75c70-0ed0-4f19-bdb5-60bb07307b26] Took 0.12 seconds to deallocate network for instance. [ 676.672353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg e4a5c96838f748cd9fe5163da700aa1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.744294] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4a5c96838f748cd9fe5163da700aa1a [ 676.747681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg c6e8c916381e40cfbf21d828b371f7b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.782120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6e8c916381e40cfbf21d828b371f7b8 [ 676.810146] env[61594]: INFO nova.scheduler.client.report [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Deleted allocations for instance 88c75c70-0ed0-4f19-bdb5-60bb07307b26 [ 676.818498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Expecting reply to msg c802492dfe534c71b8a975d234fa8a5a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 676.833248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c802492dfe534c71b8a975d234fa8a5a [ 676.833823] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2c8c43ec-9620-40ac-be02-52db2aedd1d3 
tempest-FloatingIPsAssociationTestJSON-1985048995 tempest-FloatingIPsAssociationTestJSON-1985048995-project-member] Lock "88c75c70-0ed0-4f19-bdb5-60bb07307b26" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.402s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.377224] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Successfully created port: a0a91f44-3ef8-4505-ba8d-6a2e00e6853d {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 679.784394] env[61594]: ERROR nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. [ 679.784394] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 679.784394] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.784394] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.784394] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.784394] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.784394] env[61594]: ERROR nova.compute.manager raise self.value [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.784394] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 679.784394] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.784394] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 679.784983] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.784983] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 679.784983] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. 
[ 679.784983] env[61594]: ERROR nova.compute.manager [ 679.784983] env[61594]: Traceback (most recent call last): [ 679.784983] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 679.784983] env[61594]: listener.cb(fileno) [ 679.784983] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 679.784983] env[61594]: result = function(*args, **kwargs) [ 679.784983] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.784983] env[61594]: return func(*args, **kwargs) [ 679.784983] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 679.784983] env[61594]: raise e [ 679.784983] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 679.784983] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 679.784983] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.784983] env[61594]: created_port_ids = self._update_ports_for_instance( [ 679.784983] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.784983] env[61594]: with excutils.save_and_reraise_exception(): [ 679.784983] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.784983] env[61594]: self.force_reraise() [ 679.784983] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.784983] env[61594]: raise self.value [ 679.784983] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.784983] env[61594]: updated_port = self._update_port( [ 679.784983] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.784983] env[61594]: _ensure_no_port_binding_failure(port) [ 679.784983] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.784983] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 679.786026] env[61594]: nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. [ 679.786026] env[61594]: Removing descriptor: 20 [ 679.786026] env[61594]: ERROR nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. 
[ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Traceback (most recent call last): [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] yield resources [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.driver.spawn(context, instance, image_meta, [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self._vmops.spawn(context, instance, image_meta, injected_files, [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 679.786026] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] vm_ref = self.build_virtual_machine(instance, [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] vif_infos = vmwarevif.get_vif_info(self._session, [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] for vif in network_info: [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self._sync_wrapper(fn, *args, **kwargs) [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.wait() [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self[:] = self._gt.wait() [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self._exit_event.wait() [ 679.786394] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 679.786790] env[61594]: ERROR 
nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] result = hub.switch() [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self.greenlet.switch() [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] result = function(*args, **kwargs) [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return func(*args, **kwargs) [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise e [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] nwinfo = self.network_api.allocate_for_instance( [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 679.786790] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] created_port_ids = self._update_ports_for_instance( [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] with excutils.save_and_reraise_exception(): [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.force_reraise() [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise self.value [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] updated_port = self._update_port( [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 679.787283] 
env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] _ensure_no_port_binding_failure(port) [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 679.787283] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise exception.PortBindingFailed(port_id=port['id']) [ 679.787779] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. [ 679.787779] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] [ 679.787779] env[61594]: INFO nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Terminating instance [ 679.788458] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquiring lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 679.788617] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquired lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.790788] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 679.790788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 856656b1f404483c8392f37839c5b918 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 679.801816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 856656b1f404483c8392f37839c5b918 [ 679.874911] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.666468] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.667591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 26549216d87d421a9b82e5057a6e300e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 680.679821] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26549216d87d421a9b82e5057a6e300e [ 680.681138] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Releasing lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 680.684687] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 680.684797] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 680.685416] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f2c2b17-f275-4758-816b-bbeafe0f10ee {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.698566] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d44e11-eabb-4436-bfd0-ad21e8095955 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.728024] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e9f942ac-414f-4d15-9b7a-c26e6612a787 could not be found. 
[ 680.728201] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 680.728892] env[61594]: INFO nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Took 0.04 seconds to destroy the instance on the hypervisor. [ 680.728892] env[61594]: DEBUG oslo.service.loopingcall [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 680.728892] env[61594]: DEBUG nova.compute.manager [-] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 680.729059] env[61594]: DEBUG nova.network.neutron [-] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 680.805627] env[61594]: DEBUG nova.network.neutron [-] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 680.805627] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c073b38b3c164176b6e511031d89e322 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 680.815069] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c073b38b3c164176b6e511031d89e322 [ 680.815640] env[61594]: DEBUG nova.network.neutron [-] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.815982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg de97278599cb484e988a7dd539737089 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 680.826571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de97278599cb484e988a7dd539737089 [ 680.827114] env[61594]: INFO nova.compute.manager [-] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Took 0.10 seconds to deallocate network for instance. 
[ 680.832074] env[61594]: DEBUG nova.compute.claims [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 680.832074] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 680.832074] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 680.834499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg d33fbca06c534465a4acad930560e92e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 680.884752] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d33fbca06c534465a4acad930560e92e [ 680.986091] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e3fe733-48d3-4d4b-8903-744dee5cbdc8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.994391] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8ac62f-c4d5-488d-9fad-671db36490f1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.027277] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04dd76d6-4896-4f19-a787-d1dfe3d14ea9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.035097] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49450f4-f5c8-4bda-9d30-ec285191271c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.049141] env[61594]: DEBUG nova.compute.provider_tree [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.049733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg d87973958be743f882986264d5a82086 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 681.059412] env[61594]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d87973958be743f882986264d5a82086 [ 681.060747] env[61594]: DEBUG nova.scheduler.client.report [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 681.063964] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 0114c91efacb4082890dd9dabfac7908 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 681.087506] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0114c91efacb4082890dd9dabfac7908 [ 681.087506] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.255s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.087708] env[61594]: ERROR nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. 
[ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Traceback (most recent call last): [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.driver.spawn(context, instance, image_meta, [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] vm_ref = self.build_virtual_machine(instance, [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.087708] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] for vif in network_info: [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self._sync_wrapper(fn, *args, **kwargs) [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.wait() [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self[:] = self._gt.wait() [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self._exit_event.wait() [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] result = hub.switch() [ 681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
681.088096] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return self.greenlet.switch() [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] result = function(*args, **kwargs) [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] return func(*args, **kwargs) [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise e [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] nwinfo = self.network_api.allocate_for_instance( [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] created_port_ids = self._update_ports_for_instance( [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] with excutils.save_and_reraise_exception(): [ 681.088508] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] self.force_reraise() [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise self.value [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] updated_port = self._update_port( [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] _ensure_no_port_binding_failure(port) [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] raise exception.PortBindingFailed(port_id=port['id']) [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] nova.exception.PortBindingFailed: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. [ 681.089016] env[61594]: ERROR nova.compute.manager [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] [ 681.089346] env[61594]: DEBUG nova.compute.utils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.098020] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Build of instance e9f942ac-414f-4d15-9b7a-c26e6612a787 was re-scheduled: Binding failed for port dc6c2510-4dcc-4d49-ad6f-a8b612cfa398, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 681.098020] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 681.098020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquiring lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.098020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Acquired lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.098382] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.098382] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg c4b10dd965bf4e9d917052b735e973be in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 681.106203] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
c4b10dd965bf4e9d917052b735e973be [ 681.208060] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.024584] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.025872] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 466bb5d38b234b4bafc6a74a4286f99a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.042307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 466bb5d38b234b4bafc6a74a4286f99a [ 682.045395] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Releasing lock "refresh_cache-e9f942ac-414f-4d15-9b7a-c26e6612a787" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.045395] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 682.045395] env[61594]: DEBUG nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 682.045395] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 682.118245] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.118245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg d8616b08758c482ba4e5d88762ea467a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.127264] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8616b08758c482ba4e5d88762ea467a [ 682.127264] env[61594]: DEBUG nova.network.neutron [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.127264] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 5fabec57fa554a9bad51326c44110b63 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.138148] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fabec57fa554a9bad51326c44110b63 [ 682.138148] env[61594]: INFO nova.compute.manager [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] [instance: e9f942ac-414f-4d15-9b7a-c26e6612a787] Took 0.09 seconds to deallocate network for instance. [ 682.138148] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 8aa2d5f3cb774770bee0bba51249a891 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.192365] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa2d5f3cb774770bee0bba51249a891 [ 682.195591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg c89729a099264a3ebad9c38236dc3a90 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.235238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c89729a099264a3ebad9c38236dc3a90 [ 682.272122] env[61594]: INFO nova.scheduler.client.report [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Deleted allocations for instance e9f942ac-414f-4d15-9b7a-c26e6612a787 [ 682.278936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Expecting reply to msg 70f3474f63584ffc8b6d4e27b2b2487f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.296682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70f3474f63584ffc8b6d4e27b2b2487f [ 682.297426] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-8aced41d-2fae-4c5f-a1ea-e31e722dd354 tempest-ImagesOneServerNegativeTestJSON-1423847491 tempest-ImagesOneServerNegativeTestJSON-1423847491-project-member] Lock "e9f942ac-414f-4d15-9b7a-c26e6612a787" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.488s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 682.923940] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquiring lock "4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.924177] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock "4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.924969] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg f555de62a93d4d6a8c379ce1ccff3591 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.934734] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f555de62a93d4d6a8c379ce1ccff3591 [ 682.935189] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 682.937496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 9101a1ab9fd04d9ea749493cd1f697e5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 682.973319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9101a1ab9fd04d9ea749493cd1f697e5 [ 682.990779] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 682.991054] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 682.992618] env[61594]: INFO nova.compute.claims [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.995119] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg ef733496758945f8b2e7a2095d0637c8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.033906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef733496758945f8b2e7a2095d0637c8 [ 683.033906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg eab5cbfa66064293922801d433a71cde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.041248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eab5cbfa66064293922801d433a71cde [ 683.154642] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52f69b31-eccf-43eb-a82f-cd8a1c787346 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.165825] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d14f08-6eba-495e-9418-6d77dc1c054e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.209513] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a36ec9d7-2400-498d-95d3-cad4a83d0db9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.220016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6c84171b-d4e6-4782-82e7-3e6c5a4ea6c4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.236264] env[61594]: DEBUG nova.compute.provider_tree [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.237265] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 5ad672fed75d4a039c1d87fd1f0a4f1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.254765] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ad672fed75d4a039c1d87fd1f0a4f1a [ 683.255878] env[61594]: DEBUG nova.scheduler.client.report [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 683.261362] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg ad2867b74bee474db87a6b696dd5e6cb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.274131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad2867b74bee474db87a6b696dd5e6cb [ 683.275021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.284s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 683.279454] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Start building networks asynchronously for instance. 
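The inventory dict reported to the scheduler above lists total, reserved and allocation_ratio per resource class. Assuming the usual Placement capacity formula, capacity = (total - reserved) * allocation_ratio, the node's usable capacity works out to 192 VCPU, 196078 MB of RAM and 400 GB of disk; a small stand-alone check of that arithmetic:

    # Recompute usable capacity from the inventory logged above, assuming the
    # Placement formula capacity = (total - reserved) * allocation_ratio.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f"{rc}: {capacity}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400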
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 683.281441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 900dfbd848784818972c5ed66e21f45a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.345814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 900dfbd848784818972c5ed66e21f45a [ 683.347307] env[61594]: DEBUG nova.compute.utils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 683.349154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg a3a3249adf554eb38cdddb561f4d994c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.353280] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 683.353280] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 683.360540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3a3249adf554eb38cdddb561f4d994c [ 683.361199] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Start building block device mappings for instance. 
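The records here show networking being allocated "in the background" while block-device and spawn work continues; the PortBindingFailed tracebacks further down surface only when the deferred network_info is first iterated (the _sync_wrapper/wait frames in nova/network/model.py). A stand-alone sketch of that deferred-result pattern, using concurrent.futures in place of Nova's eventlet greenthreads (all names below are illustrative):

    # Illustrative sketch of deferring network allocation and only raising on
    # first use, analogous to the async network_info wrapper in the log.
    # Uses concurrent.futures instead of eventlet; not Nova's actual code.
    from concurrent.futures import ThreadPoolExecutor

    class PortBindingFailed(Exception):
        pass

    def allocate_for_instance():
        # Stand-in for the Neutron allocation that fails in the log.
        raise PortBindingFailed("Binding failed for port <id>")

    class AsyncNetworkInfo:
        def __init__(self, executor):
            self._future = executor.submit(allocate_for_instance)

        def __iter__(self):
            # Like _sync_wrapper/wait(): the exception from the background
            # allocation is re-raised here, at first use of the result.
            return iter(self._future.result())

    with ThreadPoolExecutor() as pool:
        nw_info = AsyncNetworkInfo(pool)   # allocation starts, no error yet
        try:
            for vif in nw_info:            # error surfaces during "spawn"
                pass
        except PortBindingFailed as exc:
            print(f"spawn failed: {exc}")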
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 683.363729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg f004eacfcf494f2ea0636f6216e42cca in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.418767] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f004eacfcf494f2ea0636f6216e42cca [ 683.421791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 5aa25551656f4e6197fa8c1ad3a683fd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 683.459920] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aa25551656f4e6197fa8c1ad3a683fd [ 683.461747] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 683.501048] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 683.501315] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 683.501468] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 683.501643] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 683.501792] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] 
Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 683.501937] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 683.502165] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 683.502321] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 683.502488] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 683.502654] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 683.502834] env[61594]: DEBUG nova.virt.hardware [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 683.504171] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9025fa0f-f755-4639-849e-cdda6324d767 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.512766] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5554e7-7d4f-4e0b-b4d8-b8bcab27b76c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.989504] env[61594]: DEBUG nova.policy [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '448e504d26b447f5bbd99acd33ada1a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '56f718ba688d493793b74486ea1f9474', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 685.079233] env[61594]: 
ERROR nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. [ 685.079233] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.079233] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.079233] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.079233] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.079233] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.079233] env[61594]: ERROR nova.compute.manager raise self.value [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.079233] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 685.079233] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.079233] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 685.080072] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.080072] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 685.080072] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. 
[ 685.080072] env[61594]: ERROR nova.compute.manager [ 685.080072] env[61594]: Traceback (most recent call last): [ 685.080072] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 685.080072] env[61594]: listener.cb(fileno) [ 685.080072] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 685.080072] env[61594]: result = function(*args, **kwargs) [ 685.080072] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.080072] env[61594]: return func(*args, **kwargs) [ 685.080072] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 685.080072] env[61594]: raise e [ 685.080072] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.080072] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 685.080072] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.080072] env[61594]: created_port_ids = self._update_ports_for_instance( [ 685.080072] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.080072] env[61594]: with excutils.save_and_reraise_exception(): [ 685.080072] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.080072] env[61594]: self.force_reraise() [ 685.080072] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.080072] env[61594]: raise self.value [ 685.080072] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.080072] env[61594]: updated_port = self._update_port( [ 685.080072] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.080072] env[61594]: _ensure_no_port_binding_failure(port) [ 685.080072] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.080072] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 685.081100] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. [ 685.081100] env[61594]: Removing descriptor: 21 [ 685.081100] env[61594]: ERROR nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. 
[ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Traceback (most recent call last): [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] yield resources [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.driver.spawn(context, instance, image_meta, [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.081100] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] vm_ref = self.build_virtual_machine(instance, [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] for vif in network_info: [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self._sync_wrapper(fn, *args, **kwargs) [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.wait() [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self[:] = self._gt.wait() [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self._exit_event.wait() [ 685.081510] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.083927] env[61594]: ERROR 
nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] result = hub.switch() [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self.greenlet.switch() [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] result = function(*args, **kwargs) [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return func(*args, **kwargs) [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise e [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] nwinfo = self.network_api.allocate_for_instance( [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.083927] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] created_port_ids = self._update_ports_for_instance( [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] with excutils.save_and_reraise_exception(): [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.force_reraise() [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise self.value [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] updated_port = self._update_port( [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.084378] 
env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] _ensure_no_port_binding_failure(port) [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.084378] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise exception.PortBindingFailed(port_id=port['id']) [ 685.084760] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. [ 685.084760] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] [ 685.084760] env[61594]: INFO nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Terminating instance [ 685.084760] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.084760] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquired lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.084760] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.086183] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg dab6d8390e754327a0dfb8ede56b8d22 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 685.096829] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dab6d8390e754327a0dfb8ede56b8d22 [ 685.241517] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.554049] env[61594]: ERROR nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. 
[ 685.554049] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.554049] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.554049] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.554049] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.554049] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.554049] env[61594]: ERROR nova.compute.manager raise self.value [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.554049] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 685.554049] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.554049] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 685.555699] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.555699] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 685.555699] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. 
[ 685.555699] env[61594]: ERROR nova.compute.manager [ 685.555699] env[61594]: Traceback (most recent call last): [ 685.555699] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 685.555699] env[61594]: listener.cb(fileno) [ 685.555699] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 685.555699] env[61594]: result = function(*args, **kwargs) [ 685.555699] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.555699] env[61594]: return func(*args, **kwargs) [ 685.555699] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 685.555699] env[61594]: raise e [ 685.555699] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.555699] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 685.555699] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.555699] env[61594]: created_port_ids = self._update_ports_for_instance( [ 685.555699] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.555699] env[61594]: with excutils.save_and_reraise_exception(): [ 685.555699] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.555699] env[61594]: self.force_reraise() [ 685.555699] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.555699] env[61594]: raise self.value [ 685.555699] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.555699] env[61594]: updated_port = self._update_port( [ 685.555699] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.555699] env[61594]: _ensure_no_port_binding_failure(port) [ 685.555699] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.555699] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 685.556602] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. [ 685.556602] env[61594]: Removing descriptor: 23 [ 685.556602] env[61594]: ERROR nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. 
[ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Traceback (most recent call last): [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] yield resources [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.driver.spawn(context, instance, image_meta, [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 685.556602] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] vm_ref = self.build_virtual_machine(instance, [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] for vif in network_info: [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self._sync_wrapper(fn, *args, **kwargs) [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.wait() [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self[:] = self._gt.wait() [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self._exit_event.wait() [ 685.557010] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 685.557421] env[61594]: ERROR 
nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] result = hub.switch() [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self.greenlet.switch() [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] result = function(*args, **kwargs) [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return func(*args, **kwargs) [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise e [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] nwinfo = self.network_api.allocate_for_instance( [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 685.557421] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] created_port_ids = self._update_ports_for_instance( [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] with excutils.save_and_reraise_exception(): [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.force_reraise() [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise self.value [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] updated_port = self._update_port( [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 685.557842] 
env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] _ensure_no_port_binding_failure(port) [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 685.557842] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise exception.PortBindingFailed(port_id=port['id']) [ 685.558213] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. [ 685.558213] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] [ 685.558213] env[61594]: INFO nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Terminating instance [ 685.558213] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquiring lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 685.558213] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquired lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.558213] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 685.558462] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg aadcb94f31974f28b313da4628c8b586 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 685.571932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aadcb94f31974f28b313da4628c8b586 [ 685.655444] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance cache missing network info. 
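Both tracebacks above end in _ensure_no_port_binding_failure raising PortBindingFailed for ports 087fd1ae-... and 6fc0ede2-.... A simplified stand-alone re-creation of that check (the real helper lives in nova/network/neutron.py; the 'binding_failed' vif_type value and the locally defined exception are reproduced here only for illustration):

    # Simplified re-creation of the port-binding check behind the
    # PortBindingFailed tracebacks above; illustrative, not Nova's code.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding via the binding:vif_type field.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    port = {'id': '087fd1ae-48bb-4dec-b57e-45601c385600',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)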
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 685.851667] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.852267] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 03e98307f14e4ccb9ec60c6381d12c83 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 685.867649] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03e98307f14e4ccb9ec60c6381d12c83 [ 685.869029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Releasing lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 685.869029] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 685.869306] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 685.869838] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24318071-9eff-4190-9595-91996ba89214 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.883051] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1befed97-a5a6-4aff-a8f7-d922208e7d39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.912594] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquiring lock "e42594f1-7bf7-4630-9ebf-950007812a14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 685.912844] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "e42594f1-7bf7-4630-9ebf-950007812a14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 685.913387] env[61594]: INFO oslo_messaging._drivers.amqpdriver 
[None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg b746e85a100f4a1fb606b503afc8bc12 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 685.916083] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 410d3c8b-9be0-4863-b121-c9acffae69e4 could not be found. [ 685.916083] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 685.916083] env[61594]: INFO nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 685.916083] env[61594]: DEBUG oslo.service.loopingcall [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 685.917087] env[61594]: DEBUG nova.compute.manager [-] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 685.917087] env[61594]: DEBUG nova.network.neutron [-] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 685.926369] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b746e85a100f4a1fb606b503afc8bc12 [ 685.927540] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Starting instance... 
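The traceback frames above repeatedly pass through oslo_utils.excutils.save_and_reraise_exception before the PortBindingFailed reaches the compute manager: cleanup runs inside the context manager and the original exception is re-raised on exit. A minimal demonstration of that behaviour (requires oslo.utils; the failing call and the "rollback" step are invented for illustration):

    # Minimal demonstration of save_and_reraise_exception, the context manager
    # appearing in the traceback frames above (requires oslo.utils).
    from oslo_utils import excutils

    def update_ports():
        try:
            raise ValueError("port update failed")   # stand-in failure
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs here; the original exception is re-raised
                # automatically when the block exits.
                print("rolling back partially created ports")

    try:
        update_ports()
    except ValueError as exc:
        print(f"re-raised: {exc}")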
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 685.929103] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 7d8ce879f7eb4f8497c65c916e627443 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 685.987321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d8ce879f7eb4f8497c65c916e627443 [ 686.012115] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.012434] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.013952] env[61594]: INFO nova.compute.claims [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 686.015582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg b3b26424df91447f87b233c5349c4c9a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.016645] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.017051] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.017725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 1a23f3ac63034545b4c9be09cbdfe25e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.040492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a23f3ac63034545b4c9be09cbdfe25e [ 686.045334] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.045334] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.045334] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 686.065861] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3b26424df91447f87b233c5349c4c9a [ 686.067256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg c316b7718f5b4d768d2047d216109bd0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.077884] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c316b7718f5b4d768d2047d216109bd0 [ 686.140823] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.141822] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 975b165d105249a9b8a7623d6f7143f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.153749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 975b165d105249a9b8a7623d6f7143f2 [ 686.154421] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Releasing lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 686.154802] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 686.154991] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 686.155530] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f6471d05-9958-41ce-927a-e9fe7e8e339d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.172290] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357a18b8-9b8b-4c14-b877-b24acb57bdc4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.205763] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 996e9c89-dd9c-4c76-952d-9ff90dec38c1 could not be found. [ 686.206191] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 686.206635] env[61594]: INFO nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 686.207019] env[61594]: DEBUG oslo.service.loopingcall [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
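In both destroy sequences above the backend VM is already gone, so vmops logs "Instance does not exist on backend" as a WARNING and then continues straight to "Instance destroyed" and network deallocation. A rough sketch of that tolerant destroy flow (stand-alone; the exception class and helper names below are illustrative, not Nova's code):

    # Rough sketch of a destroy path that tolerates a missing backend VM,
    # mirroring the WARNING/DEBUG sequence above; names are illustrative.
    import logging

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    LOG = logging.getLogger("destroy")

    class InstanceNotFound(Exception):
        pass

    def find_vm(instance_uuid):
        # Stand-in for the SearchIndex.FindAllByUuid lookup in the log.
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")

    def deallocate_network(instance_uuid):
        LOG.debug("Deallocating network for instance %s", instance_uuid)

    def destroy(instance_uuid):
        try:
            vm_ref = find_vm(instance_uuid)
            # ... power off and delete vm_ref here ...
        except InstanceNotFound as exc:
            LOG.warning("Instance does not exist on backend: %s", exc)
        LOG.debug("Instance destroyed")
        deallocate_network(instance_uuid)

    destroy("410d3c8b-9be0-4863-b121-c9acffae69e4")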
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 686.208876] env[61594]: DEBUG nova.compute.manager [-] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 686.209093] env[61594]: DEBUG nova.network.neutron [-] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 686.212060] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab06847a-45dc-4e33-9b9f-0acedeff8da1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.220977] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbbda74-34de-4e94-a678-41c2fe8eeec1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.253034] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a3db50-4093-4629-ba2d-b1d929696ab6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.261837] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e34aeff-5bfc-45d1-a782-4b1697966972 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.276857] env[61594]: DEBUG nova.compute.provider_tree [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.277409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 7e8e8fa229634e93a425e627a9f716ab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.289303] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e8e8fa229634e93a425e627a9f716ab [ 686.290735] env[61594]: DEBUG nova.scheduler.client.report [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 686.292799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg fe828a2063d64226b589c52e83fd2e7b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.315245] 
env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe828a2063d64226b589c52e83fd2e7b [ 686.316076] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.304s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.317527] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 686.319312] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 6b93c89161cd4dd2a7e40c4d642175b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.330970] env[61594]: DEBUG nova.network.neutron [-] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.331228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ebe8b69fcbb246b6b32f6ef51157ad6c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.342032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebe8b69fcbb246b6b32f6ef51157ad6c [ 686.342032] env[61594]: DEBUG nova.network.neutron [-] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.342032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1cf7d350262b4bd6a0e8623daf0a21c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.359715] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cf7d350262b4bd6a0e8623daf0a21c5 [ 686.360337] env[61594]: INFO nova.compute.manager [-] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Took 0.15 seconds to deallocate network for instance. 
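[editor's note] The oslo_concurrency.lockutils lines above ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", "released ... :: held 0.304s") are emitted by oslo.concurrency's locking helpers, which Nova uses to serialize resource-tracker operations such as instance_claim on the "compute_resources" semaphore. A minimal, illustrative sketch of the decorator form follows; it is not Nova's actual code, the function name is made up, and it only assumes oslo.concurrency is installed and debug logging is enabled.

    import logging

    from oslo_concurrency import lockutils

    # Debug logging must be on for lockutils to print the acquire/release lines.
    logging.basicConfig(level=logging.DEBUG)

    @lockutils.synchronized('compute_resources')
    def instance_claim_sketch():
        # Body runs while the in-process 'compute_resources' semaphore is held;
        # lockutils logs the acquisition (with wait time) before entry and the
        # release (with held time) after return, as seen in the log above.
        pass

    instance_claim_sketch()
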
[ 686.362642] env[61594]: DEBUG nova.compute.claims [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 686.362813] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.363058] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 686.365232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg fb2e9fc20de9401bb92258db115e0a6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.374465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b93c89161cd4dd2a7e40c4d642175b1 [ 686.375925] env[61594]: DEBUG nova.compute.utils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 686.376370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 2cbf34c85cea480f938d05831a976aac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.377304] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 686.377409] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 686.392897] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2cbf34c85cea480f938d05831a976aac [ 686.393579] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 686.395236] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 209335cba7414e7ca1d318124b504c12 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.414914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb2e9fc20de9401bb92258db115e0a6e [ 686.443019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 209335cba7414e7ca1d318124b504c12 [ 686.443480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 832f93222e33432e98b81367d747158a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.486482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 832f93222e33432e98b81367d747158a [ 686.486711] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 686.524290] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 686.524687] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 686.524862] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 686.525474] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 686.525474] env[61594]: DEBUG nova.virt.hardware [None 
req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 686.525592] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 686.525763] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 686.525876] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 686.526229] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 686.527282] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 686.527851] env[61594]: DEBUG nova.virt.hardware [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 686.528739] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1375af-a23a-4aa1-b2ae-e0b6140714c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.542266] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aafcff2c-073a-4871-9a0d-d065b740b181 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.547456] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.548411] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 686.548411] env[61594]: DEBUG nova.compute.manager [None 
req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 686.548411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f80c4244a7204169b9978c6a85776394 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.550887] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6667a7dd-6178-49c7-aa08-5e4345b02250 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.563224] env[61594]: DEBUG nova.network.neutron [-] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.568940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f80c4244a7204169b9978c6a85776394 [ 686.570117] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b3e30b-3420-401e-a547-4404f88ada5e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.574496] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 686.574643] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 686.574771] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 686.574893] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 686.575030] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 686.575163] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 686.575667] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 686.606127] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6087469-0bcc-45ce-a886-56d53a44f16a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.614605] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2115616-f7a8-4bc2-8bc6-c4bc98ee3e59 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.628859] env[61594]: DEBUG nova.compute.provider_tree [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 686.629348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg f35ad262ea734b1b9628867db0787ef6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.641653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f35ad262ea734b1b9628867db0787ef6 [ 686.642677] env[61594]: DEBUG nova.scheduler.client.report [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 686.644992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 00fbce07b61d472f92e9fc76d6d7a4f8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.662019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00fbce07b61d472f92e9fc76d6d7a4f8 [ 686.662019] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.297s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 686.662019] env[61594]: ERROR nova.compute.manager [None 
req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Traceback (most recent call last): [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.driver.spawn(context, instance, image_meta, [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 686.662019] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] vm_ref = self.build_virtual_machine(instance, [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] vif_infos = vmwarevif.get_vif_info(self._session, [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] for vif in network_info: [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self._sync_wrapper(fn, *args, **kwargs) [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.wait() [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self[:] = self._gt.wait() [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 686.662353] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self._exit_event.wait() [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] result = hub.switch() [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return self.greenlet.switch() [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] result = function(*args, **kwargs) [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] return func(*args, **kwargs) [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise e [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] nwinfo = self.network_api.allocate_for_instance( [ 686.662814] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] created_port_ids = self._update_ports_for_instance( [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] with excutils.save_and_reraise_exception(): [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] self.force_reraise() [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise self.value [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] updated_port = self._update_port( [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 
996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] _ensure_no_port_binding_failure(port) [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 686.663246] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] raise exception.PortBindingFailed(port_id=port['id']) [ 686.663653] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] nova.exception.PortBindingFailed: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. [ 686.663653] env[61594]: ERROR nova.compute.manager [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] [ 686.663653] env[61594]: DEBUG nova.compute.utils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 686.663653] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Build of instance 996e9c89-dd9c-4c76-952d-9ff90dec38c1 was re-scheduled: Binding failed for port 6fc0ede2-587a-4953-acb8-f92ad2c564f4, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 686.663653] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 686.663865] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquiring lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 686.663865] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Acquired lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.663938] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 686.664301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 7ea2b53b38384b42852eb889ba7faf32 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 686.666077] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Successfully created port: 9ae290d0-490e-4e1f-b088-547ba11d2802 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 686.673410] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ea2b53b38384b42852eb889ba7faf32 [ 686.784393] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 686.929134] env[61594]: DEBUG nova.policy [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '51bd8a6208de4c3984a79dc5ba3ba622', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '84e748c48ab44d33b8b34330bd643cbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 687.047770] env[61594]: DEBUG nova.compute.manager [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Received event network-changed-087fd1ae-48bb-4dec-b57e-45601c385600 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 687.047968] env[61594]: DEBUG nova.compute.manager [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Refreshing instance network info cache due to event network-changed-087fd1ae-48bb-4dec-b57e-45601c385600. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 687.048199] env[61594]: DEBUG oslo_concurrency.lockutils [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] Acquiring lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 687.048342] env[61594]: DEBUG oslo_concurrency.lockutils [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] Acquired lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.048502] env[61594]: DEBUG nova.network.neutron [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Refreshing network info cache for port 087fd1ae-48bb-4dec-b57e-45601c385600 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 687.048919] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] Expecting reply to msg 716494d6dcca41edabe0cb8af9de5b65 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.056364] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 716494d6dcca41edabe0cb8af9de5b65 [ 687.338246] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.338839] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 
tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg b088c27c7b3f47359df62ec787719c8c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.347653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b088c27c7b3f47359df62ec787719c8c [ 687.348310] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Releasing lock "refresh_cache-996e9c89-dd9c-4c76-952d-9ff90dec38c1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 687.348527] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 687.348693] env[61594]: DEBUG nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 687.348873] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 687.399218] env[61594]: DEBUG nova.network.neutron [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.425046] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 687.425660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 6ed6c0377ac54bfc84bd0271ae9137ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.434199] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ed6c0377ac54bfc84bd0271ae9137ce [ 687.434783] env[61594]: DEBUG nova.network.neutron [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.435317] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg e16750c144ac4b679fe58cad00ea3a72 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.444541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e16750c144ac4b679fe58cad00ea3a72 [ 687.445198] env[61594]: INFO nova.compute.manager [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] [instance: 996e9c89-dd9c-4c76-952d-9ff90dec38c1] Took 0.10 seconds to deallocate network for instance. [ 687.446763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg f7333f8903c04fa3a39d0a4770b4c293 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.489290] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7333f8903c04fa3a39d0a4770b4c293 [ 687.491941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg 5e5eea25ec99416fb62408696eb93024 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.534443] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e5eea25ec99416fb62408696eb93024 [ 687.543631] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.543832] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.543992] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.544161] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.544502] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 7905f6e833d14397a4433ac201d797be in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.558338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7905f6e833d14397a4433ac201d797be [ 687.559418] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.559645] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.559885] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.560131] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 687.561733] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4a8a5a-e249-42be-a5dc-df75362f1048 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.565298] env[61594]: INFO nova.scheduler.client.report [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Deleted allocations for instance 996e9c89-dd9c-4c76-952d-9ff90dec38c1 [ 687.571636] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Expecting reply to msg dd18644cbbc743f39ff5accb0307f844 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.578717] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80a1be60-eefe-49b0-8b9d-43b9d24c8205 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.586721] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd18644cbbc743f39ff5accb0307f844 [ 687.587291] env[61594]: DEBUG oslo_concurrency.lockutils [None req-62cc2796-e288-4c3c-b946-37259e71a2d6 
tempest-ServerDiagnosticsNegativeTest-1789156982 tempest-ServerDiagnosticsNegativeTest-1789156982-project-member] Lock "996e9c89-dd9c-4c76-952d-9ff90dec38c1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 16.763s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.598476] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a259ef-83ed-4a9f-860b-673403b4825b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.606454] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb1c816-b55b-4bf0-a20a-75692941fa2c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.641238] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181521MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 687.641495] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.641749] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.642766] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 66c6adb26ed04afc988e254910e4b0d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.674022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66c6adb26ed04afc988e254910e4b0d2 [ 687.675486] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 83985ede4da34bf0bafa9223e5a37cf3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.684757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83985ede4da34bf0bafa9223e5a37cf3 [ 687.711197] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 151fefe2-b70a-4ea5-8b50-08c7968b10fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.711197] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 410d3c8b-9be0-4863-b121-c9acffae69e4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.711197] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance f2b1637b-6a4b-43e7-bd5d-d6f33abd9867 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.711197] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.711349] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance e42594f1-7bf7-4630-9ebf-950007812a14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 687.711349] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 687.711349] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 687.800643] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02974430-293b-4a44-8e95-c2749769d93b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.810164] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467c38af-5964-4c8b-9e43-64ac4856cb5c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.841943] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25018470-3a4c-4c1e-a214-e26660497ffe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.850330] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3384ee31-8351-4833-a21c-2243b5e25807 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.864394] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.865255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 62626f41bc77405e8e91b6be156d43d3 in 
queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.875825] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62626f41bc77405e8e91b6be156d43d3 [ 687.876956] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 687.879588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 808f3b6f9d53411085137eef1ab09ee7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.895243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 808f3b6f9d53411085137eef1ab09ee7 [ 687.895243] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 687.895243] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.253s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 687.962761] env[61594]: DEBUG nova.network.neutron [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.963304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] Expecting reply to msg 99a89b4f9a0f428fb57f83afedcd7e02 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 687.977992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99a89b4f9a0f428fb57f83afedcd7e02 [ 687.979101] env[61594]: DEBUG oslo_concurrency.lockutils [req-3674d0ec-1e08-412e-a12c-37af8debb00d req-cfeeea56-2575-463d-822b-47530c08df97 service nova] Releasing lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 688.029826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4a9901f141ac42878dcb4f926d04af8c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.039032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a9901f141ac42878dcb4f926d04af8c [ 688.039516] env[61594]: DEBUG nova.network.neutron [-] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.040843] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 935e782253dc4e37a8d2b544de3a8962 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.053265] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 935e782253dc4e37a8d2b544de3a8962 [ 688.053457] env[61594]: INFO nova.compute.manager [-] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Took 2.14 seconds to deallocate network for instance. [ 688.056023] env[61594]: DEBUG nova.compute.claims [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 688.056023] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.056603] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.058482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 9330bebab759442792983e0b7805a366 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.109075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9330bebab759442792983e0b7805a366 [ 688.215677] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d0a962-362d-4d4b-87ae-e56c694a4817 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.224705] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8772d261-30cb-4773-bebf-4d3b8c041057 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.259823] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007aa4fe-ac99-493a-8d93-805c27ceeb20 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.267099] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad81757c-7522-429b-b296-89dbf5c7ea60 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.283469] env[61594]: DEBUG nova.compute.provider_tree [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.284008] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg af87b815865a47409282b746c4579c27 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.299471] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af87b815865a47409282b746c4579c27 [ 688.300464] env[61594]: DEBUG nova.scheduler.client.report [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 688.303246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 2e7e34e7446041e0ada4da0d54e76701 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.319515] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e7e34e7446041e0ada4da0d54e76701 [ 688.321815] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 688.321815] env[61594]: ERROR nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. 
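Note: the PortBindingFailed above (and the traceback that follows) comes out of Nova's port-update path: _update_port calls _ensure_no_port_binding_failure(port), which rejects any port whose binding Neutron could not complete, after which the build is re-scheduled. A minimal, self-contained sketch of that check follows; the 'binding:vif_type' == 'binding_failed' condition and the local PortBindingFailed class are illustrative assumptions, not the actual nova.exception code.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")

def ensure_no_port_binding_failure(port):
    # Mirrors the check named in the traceback below; the vif_type sentinel
    # value used here is an assumption for this sketch.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

if __name__ == '__main__':
    port = {'id': '087fd1ae-48bb-4dec-b57e-45601c385600',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)
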
[ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Traceback (most recent call last): [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.driver.spawn(context, instance, image_meta, [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 688.321815] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] vm_ref = self.build_virtual_machine(instance, [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] vif_infos = vmwarevif.get_vif_info(self._session, [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] for vif in network_info: [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self._sync_wrapper(fn, *args, **kwargs) [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.wait() [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self[:] = self._gt.wait() [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self._exit_event.wait() [ 688.322147] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] result = hub.switch() [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return self.greenlet.switch() [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] result = function(*args, **kwargs) [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] return func(*args, **kwargs) [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise e [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] nwinfo = self.network_api.allocate_for_instance( [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 688.322640] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] created_port_ids = self._update_ports_for_instance( [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] with excutils.save_and_reraise_exception(): [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] self.force_reraise() [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise self.value [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] updated_port = self._update_port( [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] _ensure_no_port_binding_failure(port) [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 688.323855] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] raise exception.PortBindingFailed(port_id=port['id']) [ 688.324219] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] nova.exception.PortBindingFailed: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. [ 688.324219] env[61594]: ERROR nova.compute.manager [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] [ 688.327285] env[61594]: DEBUG nova.compute.utils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 688.328305] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Build of instance 410d3c8b-9be0-4863-b121-c9acffae69e4 was re-scheduled: Binding failed for port 087fd1ae-48bb-4dec-b57e-45601c385600, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 688.328687] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 688.328925] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquiring lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 688.329109] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Acquired lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.329266] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 688.329661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 21a14eafee5847bfbb4dcff0c41d371e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 688.340271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21a14eafee5847bfbb4dcff0c41d371e [ 688.602799] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 
tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.071439] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.071439] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 458ea60bbe5d4244a141664eb658240b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.084054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 458ea60bbe5d4244a141664eb658240b [ 689.084054] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Releasing lock "refresh_cache-410d3c8b-9be0-4863-b121-c9acffae69e4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 689.084054] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 689.084054] env[61594]: DEBUG nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 689.084054] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 689.150420] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 689.150420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 0a04e166e1754f85b99b6571e854ee66 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.167493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a04e166e1754f85b99b6571e854ee66 [ 689.170238] env[61594]: DEBUG nova.network.neutron [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.170569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 4a72bb7c89bd450e9f44a2f4aa8798bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.178994] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a72bb7c89bd450e9f44a2f4aa8798bc [ 689.182191] env[61594]: INFO nova.compute.manager [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] [instance: 410d3c8b-9be0-4863-b121-c9acffae69e4] Took 0.10 seconds to deallocate network for instance. [ 689.182191] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg b6abf9952d30407ab7bfd619d401fa08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.235017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6abf9952d30407ab7bfd619d401fa08 [ 689.235971] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg b42615707a434fce9c51bd0ffd5789cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.286905] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b42615707a434fce9c51bd0ffd5789cc [ 689.311904] env[61594]: INFO nova.scheduler.client.report [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Deleted allocations for instance 410d3c8b-9be0-4863-b121-c9acffae69e4 [ 689.318959] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Expecting reply to msg 34b843d56f39491a888c1ff3c269a736 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.337942] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34b843d56f39491a888c1ff3c269a736 [ 689.337942] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5a9b7fcd-0f28-4899-91fb-ff7b93a56f7d tempest-ServersTestMultiNic-2009464467 tempest-ServersTestMultiNic-2009464467-project-member] Lock "410d3c8b-9be0-4863-b121-c9acffae69e4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.291s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.626936] env[61594]: ERROR nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. [ 689.626936] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 689.626936] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 689.626936] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 689.626936] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.626936] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.626936] env[61594]: ERROR nova.compute.manager raise self.value [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 689.626936] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 689.626936] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.626936] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 689.627640] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.627640] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 689.627640] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. 
[ 689.627640] env[61594]: ERROR nova.compute.manager [ 689.629668] env[61594]: Traceback (most recent call last): [ 689.629668] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 689.629668] env[61594]: listener.cb(fileno) [ 689.629668] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 689.629668] env[61594]: result = function(*args, **kwargs) [ 689.629668] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 689.629668] env[61594]: return func(*args, **kwargs) [ 689.629668] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 689.629668] env[61594]: raise e [ 689.629668] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 689.629668] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 689.629668] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 689.629668] env[61594]: created_port_ids = self._update_ports_for_instance( [ 689.629668] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 689.629668] env[61594]: with excutils.save_and_reraise_exception(): [ 689.629668] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.629668] env[61594]: self.force_reraise() [ 689.629668] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.629668] env[61594]: raise self.value [ 689.629668] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 689.629668] env[61594]: updated_port = self._update_port( [ 689.629668] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.629668] env[61594]: _ensure_no_port_binding_failure(port) [ 689.629668] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.629668] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 689.629668] env[61594]: nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. [ 689.629668] env[61594]: Removing descriptor: 22 [ 689.631089] env[61594]: ERROR nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. 
[ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Traceback (most recent call last): [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] yield resources [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.driver.spawn(context, instance, image_meta, [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self._vmops.spawn(context, instance, image_meta, injected_files, [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] vm_ref = self.build_virtual_machine(instance, [ 689.631089] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] vif_infos = vmwarevif.get_vif_info(self._session, [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] for vif in network_info: [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self._sync_wrapper(fn, *args, **kwargs) [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.wait() [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self[:] = self._gt.wait() [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self._exit_event.wait() [ 689.631451] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 689.631451] env[61594]: ERROR 
nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] result = hub.switch() [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self.greenlet.switch() [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] result = function(*args, **kwargs) [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return func(*args, **kwargs) [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise e [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] nwinfo = self.network_api.allocate_for_instance( [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] created_port_ids = self._update_ports_for_instance( [ 689.631862] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] with excutils.save_and_reraise_exception(): [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.force_reraise() [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise self.value [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] updated_port = self._update_port( [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 689.632277] 
env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] _ensure_no_port_binding_failure(port) [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise exception.PortBindingFailed(port_id=port['id']) [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. [ 689.632277] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] [ 689.632765] env[61594]: INFO nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Terminating instance [ 689.635461] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 689.635775] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.635901] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 689.636450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg d847c8102c884d7eb134060f6556276a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 689.653134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d847c8102c884d7eb134060f6556276a [ 689.724396] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.076029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.076029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.076029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d261c8bd95b34a8fba214de393560905 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.101025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d261c8bd95b34a8fba214de393560905 [ 690.101025] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 690.101025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 66f631eb95844f4899a698f88dbb6361 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.152150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66f631eb95844f4899a698f88dbb6361 [ 690.175479] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.178244] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.178244] env[61594]: INFO nova.compute.claims [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 690.180180] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply 
to msg b2903c965656444a92dac6e01ba894d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.208158] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Successfully created port: 24b937ea-bba2-4255-a685-87739ea2b89e {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.237368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2903c965656444a92dac6e01ba894d3 [ 690.237368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg fa2574d0b39e4c60b7b260d27442b471 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.244702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa2574d0b39e4c60b7b260d27442b471 [ 690.361735] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b897c06b-0408-443b-acce-fc922bbd63e0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.371929] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3901fcf3-9c7a-400a-bb40-903652be0b89 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.409473] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295c4b83-4ead-4427-883a-b6681a7c11ff {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.417536] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e616f5-a1a4-47a7-817c-0ee6330f8cc2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.433783] env[61594]: DEBUG nova.compute.provider_tree [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.434185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ef3d11e548954c0ca9987c10459e21cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.445675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef3d11e548954c0ca9987c10459e21cc [ 690.446858] env[61594]: DEBUG nova.scheduler.client.report [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 690.449105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg e40d7ba135fd415087aa8337460c5059 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.466225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e40d7ba135fd415087aa8337460c5059 [ 690.466993] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.467487] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 690.469163] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg cb31af58831c42fb89d0a700b28a18c4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.513255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb31af58831c42fb89d0a700b28a18c4 [ 690.514555] env[61594]: DEBUG nova.compute.utils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 690.515172] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 271d115ab441480e8775ed93bd76ab77 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.517952] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 690.517952] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 690.527505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 271d115ab441480e8775ed93bd76ab77 [ 690.527838] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 690.529611] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 103a7c7574bb4db38072798497dc6286 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.562582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103a7c7574bb4db38072798497dc6286 [ 690.565506] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ad035a43020a4d44bce8d3369e6e4ff0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.597725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad035a43020a4d44bce8d3369e6e4ff0 [ 690.600216] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 690.630556] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 690.630556] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 690.630556] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.630955] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 690.630955] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
690.630955] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 690.631086] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 690.632151] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 690.632151] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 690.632151] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 690.632151] env[61594]: DEBUG nova.virt.hardware [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 690.635163] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a333f0a-71c0-420e-9902-6d50d33763cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.641595] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab6ab3b-80b7-4c52-bd33-8e41a689669f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.695482] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.696016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 85cfef4d2d0f474586885652153ad74c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.707958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85cfef4d2d0f474586885652153ad74c [ 690.708710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 
tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 690.708710] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 690.708710] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 690.709311] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e677191f-cc9f-4e50-b2a4-42090cae9315 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.718110] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcf702c0-5ed5-4310-be56-c62440f4ae11 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.741242] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f2b1637b-6a4b-43e7-bd5d-d6f33abd9867 could not be found. [ 690.741478] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 690.741663] env[61594]: INFO nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Took 0.03 seconds to destroy the instance on the hypervisor. [ 690.741909] env[61594]: DEBUG oslo.service.loopingcall [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 690.742149] env[61594]: DEBUG nova.compute.manager [-] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 690.742251] env[61594]: DEBUG nova.network.neutron [-] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 690.796865] env[61594]: DEBUG nova.policy [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 690.829621] env[61594]: DEBUG nova.network.neutron [-] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 690.830488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0188e411a8d342c48411a4664e1c841b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.846021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0188e411a8d342c48411a4664e1c841b [ 690.846021] env[61594]: DEBUG nova.network.neutron [-] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.846194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 911025fa25e8464a959ac54e6f741004 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.867783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 911025fa25e8464a959ac54e6f741004 [ 690.868383] env[61594]: INFO nova.compute.manager [-] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Took 0.13 seconds to deallocate network for instance. 
[ 690.871107] env[61594]: DEBUG nova.compute.claims [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 690.871320] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.871572] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.874281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7c8658e1f60647d1a6f1ae5265ba1a1f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 690.925144] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c8658e1f60647d1a6f1ae5265ba1a1f [ 691.028574] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2f480b-70da-465a-9dd7-63e86971dd7e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.039911] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e0a3d2-ee95-4a30-bb05-904089968bcf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.074708] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15e2484-c16c-4b52-ae03-0502c178cdad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.083556] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55ad5e7-f062-40d3-b157-f94d43da2b81 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.103176] env[61594]: DEBUG nova.compute.provider_tree [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.103724] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg f4d9734a8a2f4b1f90a2d9ffb72ac972 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.113670] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
f4d9734a8a2f4b1f90a2d9ffb72ac972 [ 691.114955] env[61594]: DEBUG nova.scheduler.client.report [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 691.117724] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6346a21597374c5a835c984f8a71e865 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.130275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6346a21597374c5a835c984f8a71e865 [ 691.131217] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.260s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.131750] env[61594]: ERROR nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. 
[ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Traceback (most recent call last): [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.driver.spawn(context, instance, image_meta, [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self._vmops.spawn(context, instance, image_meta, injected_files, [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] vm_ref = self.build_virtual_machine(instance, [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] vif_infos = vmwarevif.get_vif_info(self._session, [ 691.131750] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] for vif in network_info: [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self._sync_wrapper(fn, *args, **kwargs) [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.wait() [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self[:] = self._gt.wait() [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self._exit_event.wait() [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] result = hub.switch() [ 691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
691.132261] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return self.greenlet.switch() [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] result = function(*args, **kwargs) [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] return func(*args, **kwargs) [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise e [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] nwinfo = self.network_api.allocate_for_instance( [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] created_port_ids = self._update_ports_for_instance( [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] with excutils.save_and_reraise_exception(): [ 691.132692] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] self.force_reraise() [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise self.value [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] updated_port = self._update_port( [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] _ensure_no_port_binding_failure(port) [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] raise exception.PortBindingFailed(port_id=port['id']) [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] nova.exception.PortBindingFailed: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. [ 691.133112] env[61594]: ERROR nova.compute.manager [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] [ 691.133941] env[61594]: DEBUG nova.compute.utils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 691.133994] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Build of instance f2b1637b-6a4b-43e7-bd5d-d6f33abd9867 was re-scheduled: Binding failed for port a0a91f44-3ef8-4505-ba8d-6a2e00e6853d, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 691.134500] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 691.134660] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 691.134876] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.134986] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 691.135425] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 83fbfae4ef274dd193e21d73d0fb1c83 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.143405] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 83fbfae4ef274dd193e21d73d0fb1c83 [ 691.196405] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquiring lock "bfa7d6f2-050a-4604-8f9c-250d89b091a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.196922] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "bfa7d6f2-050a-4604-8f9c-250d89b091a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.200505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 00ae204accbc4465a9b80972fd8fbb36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.214370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00ae204accbc4465a9b80972fd8fbb36 [ 691.218031] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 691.219851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 4e2f2c90e6c04888b873688fbeee0eb9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.222537] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquiring lock "fc403a94-8ac7-4f73-a0a8-51539dcc47d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.225737] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "fc403a94-8ac7-4f73-a0a8-51539dcc47d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.226225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 9bf642ba31ae48c1ba2309f730856a60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.231107] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 691.235914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bf642ba31ae48c1ba2309f730856a60 [ 691.237650] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 691.239007] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 1692241ae66c4cc3b48dbd213e5401b2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.276499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e2f2c90e6c04888b873688fbeee0eb9 [ 691.300592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1692241ae66c4cc3b48dbd213e5401b2 [ 691.305040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.305300] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.307743] env[61594]: INFO nova.compute.claims [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.309960] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg cf3afe3e431e44ef99e1643173289a18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.333049] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 691.363209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3afe3e431e44ef99e1643173289a18 [ 691.365344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 414adcc8bd0b4c3c8f4763a0c7cf933b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.380499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 414adcc8bd0b4c3c8f4763a0c7cf933b [ 691.525860] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182c0dd1-b990-4b6e-bdd3-c1c625bb0137 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.537819] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54e3665-c141-4b32-9cb3-a6eeda61515f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.571031] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6ab701-1dd4-47ac-bab7-cfe92daef913 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.577777] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99408a8-f51c-447b-94c8-6fb98b3d7a5a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.591414] env[61594]: DEBUG nova.compute.provider_tree [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.592040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 89d129957362466db1314c02ef4d7532 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.604958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89d129957362466db1314c02ef4d7532 [ 691.606155] env[61594]: DEBUG nova.scheduler.client.report [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 691.608464] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 06a180a82cb645f89d01bd285a1341d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.631453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06a180a82cb645f89d01bd285a1341d8 [ 691.632841] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.327s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.633352] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 691.635014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg bb7ed5246b954cb682027b13d88fd5ad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.635957] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.304s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.637335] env[61594]: INFO nova.compute.claims [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.638915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 3b69bdfe762c46d3ba826e9ba6db7dcb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.682706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb7ed5246b954cb682027b13d88fd5ad [ 691.683391] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b69bdfe762c46d3ba826e9ba6db7dcb [ 691.686610] env[61594]: DEBUG nova.compute.utils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.686610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 79c979ef038a4a84a4035d4f56c7d62c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.687095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 258cb8fb41a6470eb91e0c3b1b541fc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.688025] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 691.688196] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 691.697974] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79c979ef038a4a84a4035d4f56c7d62c [ 691.698581] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 691.700249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg a065b4b1c8914dd3ba5c9cbd5118a039 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.701374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 258cb8fb41a6470eb91e0c3b1b541fc6 [ 691.736226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a065b4b1c8914dd3ba5c9cbd5118a039 [ 691.739349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 8087e2386945445fabacd98b540502e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.779152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8087e2386945445fabacd98b540502e0 [ 691.780417] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 691.816020] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 691.816287] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 691.816436] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 691.816821] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 691.816821] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 691.816900] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 691.817097] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 691.817341] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 691.817516] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 
tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 691.817737] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 691.817953] env[61594]: DEBUG nova.virt.hardware [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 691.819740] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f191351b-4a1a-47ea-b459-2566dbbd8749 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.833652] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17909cae-f1da-4552-bebf-b47d70a1a3c4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.851139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24e5472-60a1-4534-9cd8-b93d33e2987d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.858225] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f9bede-4e1c-4ba0-b7db-9f0ce6303d39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.889783] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460480b4-5df0-4336-9f09-43ee8b13edad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.897889] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a11be9-48ec-4b08-9f78-b645ef27fef5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.911346] env[61594]: DEBUG nova.compute.provider_tree [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.911854] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg dc324b986863422d96835252dfa60f72 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.920197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc324b986863422d96835252dfa60f72 [ 691.921164] env[61594]: DEBUG nova.scheduler.client.report [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 
tempest-ServerExternalEventsTest-1272541702-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 691.923375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 4072be81c39e4c7fb2310b8d4fff8b3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.937595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4072be81c39e4c7fb2310b8d4fff8b3a [ 691.938366] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 691.938948] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 691.940624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg d36b2b0c684a485e9fe92ef8e733e4f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.981013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d36b2b0c684a485e9fe92ef8e733e4f3 [ 691.981497] env[61594]: DEBUG nova.compute.utils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 691.982355] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 34065f55b55f44bcac0b429d303928dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 691.983478] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 691.983478] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 691.995734] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34065f55b55f44bcac0b429d303928dc [ 691.995734] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 691.996866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg e86eed1c92324ec89d5cc1b32582e30f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.029733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e86eed1c92324ec89d5cc1b32582e30f [ 692.033042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg d3a3668180094a32a5aeebc256be0fbd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.073605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3a3668180094a32a5aeebc256be0fbd [ 692.074853] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 692.101399] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 692.101529] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 692.101656] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 692.101800] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 692.101948] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 692.102238] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 692.102308] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 692.102463] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 692.102629] 
env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 692.102794] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 692.103207] env[61594]: DEBUG nova.virt.hardware [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 692.104566] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f24ba4-80fb-46b5-a195-b95dde7830a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.109669] env[61594]: DEBUG nova.policy [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4ee6f0e9339c48fa984086efe0bfde44', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fc3d6b59e1fc476f83e4d63e858ca18c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 692.114526] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4504171a-63f7-4cfd-ad6d-1e13227df584 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.293664] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.294276] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 8ceef7c10b2b47509b823cd20a620080 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.308788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ceef7c10b2b47509b823cd20a620080 [ 692.309571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 692.309854] 
env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 692.310061] env[61594]: DEBUG nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 692.310235] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 692.429739] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 692.430378] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a8366ae43d904050aaee141396981f7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.437846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8366ae43d904050aaee141396981f7f [ 692.438394] env[61594]: DEBUG nova.network.neutron [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.438847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg f01452135755431f97c61e49fb521342 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.448042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f01452135755431f97c61e49fb521342 [ 692.448464] env[61594]: INFO nova.compute.manager [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: f2b1637b-6a4b-43e7-bd5d-d6f33abd9867] Took 0.14 seconds to deallocate network for instance. 
[ 692.450246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b12a1cc4ae734899b431a3710eb109d1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.503521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b12a1cc4ae734899b431a3710eb109d1 [ 692.506314] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 9e5e278c46f64d65825afe6a26ce13d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.538816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e5e278c46f64d65825afe6a26ce13d2 [ 692.563521] env[61594]: INFO nova.scheduler.client.report [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Deleted allocations for instance f2b1637b-6a4b-43e7-bd5d-d6f33abd9867 [ 692.569643] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg df0056561a9447a1be56d0b172d5306b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 692.586616] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df0056561a9447a1be56d0b172d5306b [ 692.587159] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4659b6a2-01b5-4b63-89f9-ed4c983c70a3 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "f2b1637b-6a4b-43e7-bd5d-d6f33abd9867" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.802s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 692.741200] env[61594]: DEBUG nova.policy [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2048b07a11f84e9a8ccebf543f504aab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '445252cbff1b49cf8874f3ae2e2e3395', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 693.786818] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Successfully created port: b05ad338-4b4c-4909-b4c5-4725bd07c46b {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 694.878034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dd6faf635c06497ebb32977750c3d425 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 694.888337] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd6faf635c06497ebb32977750c3d425 [ 
695.896059] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Successfully created port: 75516f8b-27d7-431d-ac1d-969b78af4c51 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.046327] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "deb53e97-7f20-47d3-a069-0e435776bad8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.046569] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "deb53e97-7f20-47d3-a069-0e435776bad8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.047082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 02477520fd6c4ebab0402ae83733e30f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.071139] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02477520fd6c4ebab0402ae83733e30f [ 696.071804] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 696.073993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 6792500eb0bb4ac7ace35a2224318020 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.089021] env[61594]: ERROR nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. 
[ 696.089021] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 696.089021] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 696.089021] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 696.089021] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.089021] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.089021] env[61594]: ERROR nova.compute.manager raise self.value [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 696.089021] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 696.089021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.089021] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 696.089558] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.089558] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 696.089558] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. 
[ 696.089558] env[61594]: ERROR nova.compute.manager [ 696.089558] env[61594]: Traceback (most recent call last): [ 696.089558] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 696.089558] env[61594]: listener.cb(fileno) [ 696.089558] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 696.089558] env[61594]: result = function(*args, **kwargs) [ 696.089558] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 696.089558] env[61594]: return func(*args, **kwargs) [ 696.089558] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 696.089558] env[61594]: raise e [ 696.089558] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 696.089558] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 696.089558] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 696.089558] env[61594]: created_port_ids = self._update_ports_for_instance( [ 696.089558] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 696.089558] env[61594]: with excutils.save_and_reraise_exception(): [ 696.089558] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.089558] env[61594]: self.force_reraise() [ 696.089558] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.089558] env[61594]: raise self.value [ 696.089558] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 696.089558] env[61594]: updated_port = self._update_port( [ 696.089558] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.089558] env[61594]: _ensure_no_port_binding_failure(port) [ 696.089558] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.089558] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 696.091262] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. [ 696.091262] env[61594]: Removing descriptor: 20 [ 696.091262] env[61594]: ERROR nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. 
[ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Traceback (most recent call last): [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] yield resources [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.driver.spawn(context, instance, image_meta, [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 696.091262] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] vm_ref = self.build_virtual_machine(instance, [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] vif_infos = vmwarevif.get_vif_info(self._session, [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] for vif in network_info: [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self._sync_wrapper(fn, *args, **kwargs) [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.wait() [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self[:] = self._gt.wait() [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self._exit_event.wait() [ 696.091636] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 696.092021] env[61594]: ERROR 
nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] result = hub.switch() [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self.greenlet.switch() [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] result = function(*args, **kwargs) [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return func(*args, **kwargs) [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise e [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] nwinfo = self.network_api.allocate_for_instance( [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 696.092021] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] created_port_ids = self._update_ports_for_instance( [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] with excutils.save_and_reraise_exception(): [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.force_reraise() [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise self.value [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] updated_port = self._update_port( [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 696.092399] 
env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] _ensure_no_port_binding_failure(port) [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 696.092399] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise exception.PortBindingFailed(port_id=port['id']) [ 696.092773] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. [ 696.092773] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] [ 696.092773] env[61594]: INFO nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Terminating instance [ 696.096161] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquiring lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 696.096161] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquired lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.096161] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 696.096161] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg d672fb6b67354a6f853bf20cbee04a1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.112547] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d672fb6b67354a6f853bf20cbee04a1a [ 696.127336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6792500eb0bb4ac7ace35a2224318020 [ 696.152142] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.152355] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.153881] env[61594]: INFO nova.compute.claims [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.156199] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 9bbcd69de9424b5f916ef637524dfe4b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.207709] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bbcd69de9424b5f916ef637524dfe4b [ 696.209666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 834a64e79ef6497182b325596a31b6ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.214479] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 696.226126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 834a64e79ef6497182b325596a31b6ed [ 696.261669] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "e20b5017-bc1a-41c0-ba4b-83e4df43a53d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.262331] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "e20b5017-bc1a-41c0-ba4b-83e4df43a53d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.262887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 279473b5fe584ff0b9dc27cb6d85223e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.281736] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 279473b5fe584ff0b9dc27cb6d85223e [ 696.282364] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 696.284668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 449d27b9251e4d4b8d1ebb5356d45847 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.349420] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Successfully created port: 80d114f8-4d5f-41a9-b5ab-bb2d668a8121 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.365464] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 449d27b9251e4d4b8d1ebb5356d45847 [ 696.389113] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 696.419260] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2a2992-3033-4a15-a533-5921295a401f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.427950] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f074768-1845-4639-95d0-66e5e381f065 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.460785] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f2e001-fd48-4141-a8af-c9ef85ccba2b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.468647] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63ed573-096c-4a40-8d3b-08991574a61e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.482296] env[61594]: DEBUG nova.compute.provider_tree [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.482810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 0a21aa7b0875429f9b8dfd6c925edbb1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.491167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a21aa7b0875429f9b8dfd6c925edbb1 [ 696.492184] env[61594]: DEBUG nova.scheduler.client.report [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 696.494406] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg fa5a07822ee64f04b5aa9fe225e6c1bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.508793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa5a07822ee64f04b5aa9fe225e6c1bb [ 696.509617] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.510126] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 696.511935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 71d077dd8a734e6db864213db337401f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.512787] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.124s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.514278] env[61594]: INFO nova.compute.claims [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 696.516206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg b137d62dc37e4316a58c99d4ebb4e2b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.559765] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71d077dd8a734e6db864213db337401f [ 696.561191] env[61594]: DEBUG nova.compute.utils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Using /dev/sd instead of None {{(pid=61594) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.561776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg f3bee758a7844d7eaadae38b196d0dc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.563023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b137d62dc37e4316a58c99d4ebb4e2b4 [ 696.563562] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 696.563742] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.567572] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 4cd1691ce2fd40548473ff680bc472af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.577327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cd1691ce2fd40548473ff680bc472af [ 696.577863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3bee758a7844d7eaadae38b196d0dc6 [ 696.582032] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 696.582512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 7bccbed86b5f4b34b46339d529415680 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.643887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bccbed86b5f4b34b46339d529415680 [ 696.647267] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 3e5c69bc4d6847ceae2c3d24c74483ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.691835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e5c69bc4d6847ceae2c3d24c74483ef [ 696.693642] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 696.743024] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 696.743314] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 696.743473] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 696.743655] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 696.743802] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 696.743953] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 696.744179] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 696.744341] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 696.744509] env[61594]: DEBUG nova.virt.hardware [None 
req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 696.744672] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 696.744849] env[61594]: DEBUG nova.virt.hardware [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 696.745851] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80351b77-83ec-4efa-aa3f-6260783b8208 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.749719] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c98659-4d8e-488c-8e8b-09449561ff04 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.759415] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355fad6e-bead-4d5e-8289-db8451bd6a48 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.764338] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bba0a14-26df-47cd-9e02-10574fcb86d8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.807812] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18ae4be-825b-4ab9-a265-fff5f0e5b31e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.818023] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbd5b43-1cc8-462d-9b5a-b1a7c60f3bbe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.834254] env[61594]: DEBUG nova.compute.provider_tree [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.834928] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 6655df6812544e5b91b92f304ea2ab61 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.846213] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6655df6812544e5b91b92f304ea2ab61 [ 696.846308] env[61594]: DEBUG nova.scheduler.client.report [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 
tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 696.848848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 2152affd53804cd89c19fd2a5122e31a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.870303] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2152affd53804cd89c19fd2a5122e31a [ 696.871997] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.358s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 696.871997] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 696.873798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 67184b8d9e0d4b20a952f6c2a8308cda in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.916089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67184b8d9e0d4b20a952f6c2a8308cda [ 696.917255] env[61594]: DEBUG nova.compute.utils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.918151] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg c96b3309c2b14d8997d37b4704acf6eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.921273] env[61594]: DEBUG nova.policy [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '084fb304b8aa400ebcd3bab4404c36ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1a71ad350ba241b5a3f933887cb0e8a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 696.923311] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 696.923311] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.931503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c96b3309c2b14d8997d37b4704acf6eb [ 696.931879] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 696.933935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 196ef1ad3626470dbca82e3a9d63c232 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 696.978713] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 196ef1ad3626470dbca82e3a9d63c232 [ 696.981723] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg d9ec0dc28c6e4e358ba92cfb0e5cefc9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.024483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9ec0dc28c6e4e358ba92cfb0e5cefc9 [ 697.025696] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 697.051917] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.052306] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.052508] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.052704] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.052948] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 
tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.053058] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.053224] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.054221] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.054221] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.054221] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.054221] env[61594]: DEBUG nova.virt.hardware [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.055302] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a1f348-d0a1-495c-91f3-306b23ea83ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.066321] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9be812b-84ff-43ae-bded-0358cc897ebd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.128730] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.128730] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] 
Expecting reply to msg 03c66e6bdb4d44849fb0d24d96d0b909 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.136909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 03c66e6bdb4d44849fb0d24d96d0b909 [ 697.137615] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Releasing lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 697.141093] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 697.141093] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 697.141093] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-296ac824-573d-43a2-b98b-08343408fa63 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.148243] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d74d23-9e11-4bc7-8f9f-7f74554a05e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.172723] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2 could not be found. [ 697.172723] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 697.172723] env[61594]: INFO nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Took 0.03 seconds to destroy the instance on the hypervisor. [ 697.172723] env[61594]: DEBUG oslo.service.loopingcall [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 697.172723] env[61594]: DEBUG nova.compute.manager [-] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 697.173022] env[61594]: DEBUG nova.network.neutron [-] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 697.271580] env[61594]: DEBUG nova.network.neutron [-] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.272401] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 78e7c37f488c4aba970000388d87bf9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.288967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78e7c37f488c4aba970000388d87bf9f [ 697.288967] env[61594]: DEBUG nova.network.neutron [-] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.288967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6cff7c6b429b4940be1de48f35a7db0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.292299] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cff7c6b429b4940be1de48f35a7db0b [ 697.292837] env[61594]: INFO nova.compute.manager [-] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Took 0.12 seconds to deallocate network for instance. 
[ 697.295860] env[61594]: DEBUG nova.compute.claims [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 697.296058] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 697.296290] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.298245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 44e8e862ba81492fa5c6d496f5289000 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.344494] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44e8e862ba81492fa5c6d496f5289000 [ 697.504221] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d53287-2019-4fae-9a4d-2a1c233149a5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.513243] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6b1e41-1ec3-4eb6-9bf7-62537c4c1830 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.517739] env[61594]: DEBUG nova.policy [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38ed2b39a2d0462095de83291bca4587', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '19b2170b4bde4207b5a774f1cd266796', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 697.551087] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1341a7-93c1-4241-a6fd-9aeca52c8327 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.558605] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3406a69-6d6f-49b2-9ec4-b0d410c79a75 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.572035] env[61594]: DEBUG nova.compute.provider_tree [None 
req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.572495] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg d4f4321cfde140469a0d9b6de1cc60e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.580818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4f4321cfde140469a0d9b6de1cc60e0 [ 697.581771] env[61594]: DEBUG nova.scheduler.client.report [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 697.584029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 2eec6dc840414017a9cd3b84d37dae47 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.599968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2eec6dc840414017a9cd3b84d37dae47 [ 697.600826] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.601439] env[61594]: ERROR nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. 
[ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Traceback (most recent call last): [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.driver.spawn(context, instance, image_meta, [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] vm_ref = self.build_virtual_machine(instance, [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.601439] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] for vif in network_info: [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self._sync_wrapper(fn, *args, **kwargs) [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.wait() [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self[:] = self._gt.wait() [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self._exit_event.wait() [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] result = hub.switch() [ 697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
697.601834] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return self.greenlet.switch() [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] result = function(*args, **kwargs) [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] return func(*args, **kwargs) [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise e [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] nwinfo = self.network_api.allocate_for_instance( [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] created_port_ids = self._update_ports_for_instance( [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] with excutils.save_and_reraise_exception(): [ 697.602235] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] self.force_reraise() [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise self.value [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] updated_port = self._update_port( [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] _ensure_no_port_binding_failure(port) [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] raise exception.PortBindingFailed(port_id=port['id']) [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] nova.exception.PortBindingFailed: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. [ 697.602690] env[61594]: ERROR nova.compute.manager [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] [ 697.603070] env[61594]: DEBUG nova.compute.utils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.603726] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Build of instance 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2 was re-scheduled: Binding failed for port 9ae290d0-490e-4e1f-b088-547ba11d2802, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 697.604236] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 697.604461] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquiring lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.604606] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Acquired lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.604768] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.605179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg ee8aadba40ac4e468b1d652667666696 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 697.613513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee8aadba40ac4e468b1d652667666696 [ 697.692826] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b 
tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.585132] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.585132] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 30924cb3ee7e4d58a144cc275e50b42a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.598156] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30924cb3ee7e4d58a144cc275e50b42a [ 698.598156] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Releasing lock "refresh_cache-4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.598156] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 698.598156] env[61594]: DEBUG nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 698.598156] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.679219] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.679219] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 72756fd965e643bbb1459e887ad3ed1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.687311] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72756fd965e643bbb1459e887ad3ed1a [ 698.687311] env[61594]: DEBUG nova.network.neutron [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.687311] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 3110dab88fb144df8a2874055fdca464 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.696496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3110dab88fb144df8a2874055fdca464 [ 698.700300] env[61594]: INFO nova.compute.manager [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] [instance: 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2] Took 0.10 seconds to deallocate network for instance. [ 698.700300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg fd5241eb693149d092a3a2c5c6e75fdb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.740028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd5241eb693149d092a3a2c5c6e75fdb [ 698.744844] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg a06252f8f38c47b6a5d7f7ccef5767c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.790473] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a06252f8f38c47b6a5d7f7ccef5767c6 [ 698.825299] env[61594]: INFO nova.scheduler.client.report [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Deleted allocations for instance 4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2 [ 698.835815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Expecting reply to msg 926eea14b1614c328ecd3823f0b04082 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 698.856640] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 926eea14b1614c328ecd3823f0b04082 [ 698.857319] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3fe887b7-08ae-4246-9828-305b72996d5b tempest-ServerActionsTestOtherB-276147654 tempest-ServerActionsTestOtherB-276147654-project-member] Lock 
"4858bdbd-3aa6-4ac6-ab02-cd2d6ff03ea2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.933s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.433437] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Successfully created port: 87352393-9957-4fc9-a5bc-009618ab2125 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.293655] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Successfully created port: e0bc5458-78be-41d3-b081-0861882153df {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 702.199545] env[61594]: ERROR nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 702.199545] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.199545] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.199545] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.199545] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.199545] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.199545] env[61594]: ERROR nova.compute.manager raise self.value [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.199545] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 702.199545] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.199545] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 702.200312] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.200312] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 702.200312] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 702.200312] env[61594]: ERROR nova.compute.manager [ 702.200312] env[61594]: Traceback (most recent call last): [ 702.200312] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 702.200312] env[61594]: listener.cb(fileno) [ 702.200312] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 702.200312] env[61594]: result = function(*args, **kwargs) [ 702.200312] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.200312] env[61594]: return func(*args, **kwargs) [ 702.200312] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 702.200312] env[61594]: raise e [ 702.200312] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.200312] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 702.200312] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.200312] env[61594]: created_port_ids = self._update_ports_for_instance( [ 702.200312] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.200312] env[61594]: with excutils.save_and_reraise_exception(): [ 702.200312] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.200312] env[61594]: self.force_reraise() [ 702.200312] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.200312] env[61594]: raise self.value [ 702.200312] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.200312] env[61594]: updated_port = self._update_port( [ 702.200312] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.200312] env[61594]: _ensure_no_port_binding_failure(port) [ 702.200312] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.200312] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 702.201206] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 702.201206] env[61594]: Removing descriptor: 23 [ 702.201206] env[61594]: ERROR nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. 
[ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Traceback (most recent call last): [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] yield resources [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.driver.spawn(context, instance, image_meta, [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.201206] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] vm_ref = self.build_virtual_machine(instance, [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] for vif in network_info: [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self._sync_wrapper(fn, *args, **kwargs) [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.wait() [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self[:] = self._gt.wait() [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self._exit_event.wait() [ 702.205125] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.205395] env[61594]: ERROR 
nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] result = hub.switch() [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self.greenlet.switch() [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] result = function(*args, **kwargs) [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return func(*args, **kwargs) [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise e [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] nwinfo = self.network_api.allocate_for_instance( [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.205395] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] created_port_ids = self._update_ports_for_instance( [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] with excutils.save_and_reraise_exception(): [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.force_reraise() [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise self.value [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] updated_port = self._update_port( [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.205646] 
env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] _ensure_no_port_binding_failure(port) [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.205646] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise exception.PortBindingFailed(port_id=port['id']) [ 702.205896] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 702.205896] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] [ 702.205896] env[61594]: INFO nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Terminating instance [ 702.205896] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquiring lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.205896] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquired lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.205896] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.206066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 330f179eb03c4dafa25cbbe48af435d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 702.214884] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 330f179eb03c4dafa25cbbe48af435d5 [ 702.323287] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.436478] env[61594]: ERROR nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. 
[ 702.436478] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.436478] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.436478] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.436478] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.436478] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.436478] env[61594]: ERROR nova.compute.manager raise self.value [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.436478] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 702.436478] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.436478] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 702.436885] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.436885] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 702.436885] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. 
[ 702.436885] env[61594]: ERROR nova.compute.manager [ 702.436885] env[61594]: Traceback (most recent call last): [ 702.436885] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 702.436885] env[61594]: listener.cb(fileno) [ 702.436885] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 702.436885] env[61594]: result = function(*args, **kwargs) [ 702.436885] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.436885] env[61594]: return func(*args, **kwargs) [ 702.436885] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 702.436885] env[61594]: raise e [ 702.436885] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.436885] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 702.436885] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.436885] env[61594]: created_port_ids = self._update_ports_for_instance( [ 702.436885] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.436885] env[61594]: with excutils.save_and_reraise_exception(): [ 702.436885] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.436885] env[61594]: self.force_reraise() [ 702.436885] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.436885] env[61594]: raise self.value [ 702.436885] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.436885] env[61594]: updated_port = self._update_port( [ 702.436885] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.436885] env[61594]: _ensure_no_port_binding_failure(port) [ 702.436885] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.436885] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 702.437602] env[61594]: nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. [ 702.437602] env[61594]: Removing descriptor: 21 [ 702.437602] env[61594]: ERROR nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. 
[ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Traceback (most recent call last): [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] yield resources [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.driver.spawn(context, instance, image_meta, [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.437602] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] vm_ref = self.build_virtual_machine(instance, [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] for vif in network_info: [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self._sync_wrapper(fn, *args, **kwargs) [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.wait() [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self[:] = self._gt.wait() [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self._exit_event.wait() [ 702.438063] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 702.438329] env[61594]: ERROR 
nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] result = hub.switch() [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self.greenlet.switch() [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] result = function(*args, **kwargs) [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return func(*args, **kwargs) [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise e [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] nwinfo = self.network_api.allocate_for_instance( [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.438329] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] created_port_ids = self._update_ports_for_instance( [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] with excutils.save_and_reraise_exception(): [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.force_reraise() [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise self.value [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] updated_port = self._update_port( [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.438620] 
env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] _ensure_no_port_binding_failure(port) [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 702.438620] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise exception.PortBindingFailed(port_id=port['id']) [ 702.438879] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. [ 702.438879] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] [ 702.438879] env[61594]: INFO nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Terminating instance [ 702.439957] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.440426] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.440426] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 702.440707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4e9aa1d204684203adcce3d09d9c8f7a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 702.449639] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e9aa1d204684203adcce3d09d9c8f7a [ 702.556340] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.012983] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.014118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 952e6e22bcac46a183b15957cd458f06 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.033873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 952e6e22bcac46a183b15957cd458f06 [ 703.033873] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.033873] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 703.033873] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.033873] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06596e39-84bd-4841-a079-09a51d35e92c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.050916] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e1786e-0005-4b7f-8689-68e2f6d0cefa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.078035] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba could not be found. [ 703.078035] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.078394] env[61594]: INFO nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Took 0.05 seconds to destroy the instance on the hypervisor. 
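The "Instance does not exist on backend" warning followed immediately by "Instance destroyed" is the expected cleanup path when spawn fails before any VM is created in vCenter: the compute manager still drives a full destroy, and the VMware ops layer treats a missing backend VM as already gone rather than as an error. A minimal sketch of that tolerance, with the vmops object injected as a parameter and not meant as the actual vmops internals:

    from nova import exception

    def safe_destroy(vmops, instance):
        """Destroy that tolerates an instance which never reached the backend."""
        try:
            vmops.destroy(instance)          # normal teardown path
        except exception.InstanceNotFound:
            # Spawn failed before the VM existed in vCenter; log-and-continue,
            # which is exactly what the WARNING entry above reflects.
            pass
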
[ 703.078767] env[61594]: DEBUG oslo.service.loopingcall [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.081279] env[61594]: DEBUG nova.compute.manager [-] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 703.081279] env[61594]: DEBUG nova.network.neutron [-] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.121499] env[61594]: DEBUG nova.network.neutron [-] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.122089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 15df8784aa6d4c95a6edfc72aa195a82 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.135395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15df8784aa6d4c95a6edfc72aa195a82 [ 703.135395] env[61594]: DEBUG nova.network.neutron [-] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.135395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c7e2f61b962498d80f17b84c549d8d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.147060] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c7e2f61b962498d80f17b84c549d8d5 [ 703.148494] env[61594]: INFO nova.compute.manager [-] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Took 0.07 seconds to deallocate network for instance. 
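The "Waiting for function ..._deallocate_network_with_retries to return" entries come from oslo.service's looping-call machinery, which Nova uses to retry Neutron deallocation a few times before giving up. A rough sketch of that retry shape using oslo_service.loopingcall; the attempt count and the wrapped call are illustrative, not Nova's exact values:

    from oslo_service import loopingcall

    def deallocate_with_retries(network_api, context, instance, max_attempts=3):
        """Retry Neutron deallocation on a fixed interval before giving up."""
        attempt = {'count': 0}

        def _try_deallocate():
            attempt['count'] += 1
            try:
                network_api.deallocate_for_instance(context, instance)
            except Exception:
                if attempt['count'] >= max_attempts:
                    raise                  # stops the loop; re-raised from wait()
                return                     # swallow and let the next interval retry
            raise loopingcall.LoopingCallDone()   # success: stop looping

        timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
        timer.start(interval=1).wait()
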
[ 703.151285] env[61594]: DEBUG nova.compute.claims [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 703.151489] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.152036] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.153912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg fc1b3de914f2493ab305ccc54d4c5899 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.212346] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc1b3de914f2493ab305ccc54d4c5899 [ 703.247337] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquiring lock "b3e22378-e257-4e49-9eb2-787b4afd0eb8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.247564] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "b3e22378-e257-4e49-9eb2-787b4afd0eb8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.248074] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg e8d24acd90bf41a6929f6deb918e88f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.265907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8d24acd90bf41a6929f6deb918e88f3 [ 703.266397] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 703.268477] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 279c337cc51e4883b49699ef695a9fa6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.320896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 279c337cc51e4883b49699ef695a9fa6 [ 703.343939] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.390067] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquiring lock "7b2debf8-278d-443f-aaf7-3ae6c129981c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.391565] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "7b2debf8-278d-443f-aaf7-3ae6c129981c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.392929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg a4afbe0f20d54b6d9ae85cc7590a5ff2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.397173] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f60fd7e-e778-4753-b38e-26c10d1e8cbd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.409097] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1931c3df-3e6c-49bd-b2aa-b94b4994d115 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.415014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4afbe0f20d54b6d9ae85cc7590a5ff2 [ 703.415554] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 703.417329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 52cc7270738c43279aacbba9325fc12b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.451872] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d85e142-c4dd-4b50-8a7f-e4647bce3e62 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.460207] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a0df72-c5e1-42e0-aefc-7683184f08b3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.477897] env[61594]: DEBUG nova.compute.provider_tree [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.477897] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d9ae45983f0b4bfab07dd260886bbf06 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.479111] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52cc7270738c43279aacbba9325fc12b [ 703.484014] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.484014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg f9d55326893145ffa379ce816b01a5ab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.497460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9ae45983f0b4bfab07dd260886bbf06 [ 703.498040] env[61594]: DEBUG nova.scheduler.client.report [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 703.500367] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 
8a771546055140cda927607b63785e78 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.501428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9d55326893145ffa379ce816b01a5ab [ 703.502522] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Releasing lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.502902] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 703.503107] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.506041] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-34f52955-c308-4849-a3ec-bbf129a91c92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.514920] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a771546055140cda927607b63785e78 [ 703.516080] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.364s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.516427] env[61594]: ERROR nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. 
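The inventory dict the report client logs above is what the resource tracker pushes to Placement for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be; the schedulable capacity of each resource class is (total - reserved) * allocation_ratio, while max_unit caps the size of any single allocation (16 VCPUs per instance here). Worked out for the values shown:

    # Capacity implied by the inventory logged above:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
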
[ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Traceback (most recent call last): [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.driver.spawn(context, instance, image_meta, [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] vm_ref = self.build_virtual_machine(instance, [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.516427] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] for vif in network_info: [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self._sync_wrapper(fn, *args, **kwargs) [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.wait() [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self[:] = self._gt.wait() [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self._exit_event.wait() [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] result = hub.switch() [ 703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
703.516731] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return self.greenlet.switch() [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] result = function(*args, **kwargs) [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] return func(*args, **kwargs) [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise e [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] nwinfo = self.network_api.allocate_for_instance( [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] created_port_ids = self._update_ports_for_instance( [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] with excutils.save_and_reraise_exception(): [ 703.517101] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] self.force_reraise() [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise self.value [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] updated_port = self._update_port( [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] _ensure_no_port_binding_failure(port) [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] raise exception.PortBindingFailed(port_id=port['id']) [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] nova.exception.PortBindingFailed: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. [ 703.517432] env[61594]: ERROR nova.compute.manager [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] [ 703.517733] env[61594]: DEBUG nova.compute.utils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 703.522115] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d96cc2-f475-4dc8-ae0d-5d91d72ac0ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.534783] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Build of instance ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba was re-scheduled: Binding failed for port b05ad338-4b4c-4909-b4c5-4725bd07c46b, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 703.535285] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 703.535519] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.535664] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.535826] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 703.536283] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f5da415633104cfcb1c03a69edda0ec6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.537795] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.538071] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.194s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.539808] env[61594]: INFO nova.compute.claims [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.541147] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 687191a1e3bc4b3b990e0d295340cd12 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.559966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5da415633104cfcb1c03a69edda0ec6 [ 703.560752] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e42594f1-7bf7-4630-9ebf-950007812a14 could not be found. [ 703.560993] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.561204] env[61594]: INFO nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Took 0.06 seconds to destroy the instance on the hypervisor. [ 703.561456] env[61594]: DEBUG oslo.service.loopingcall [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.564033] env[61594]: DEBUG nova.compute.manager [-] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 703.564133] env[61594]: DEBUG nova.network.neutron [-] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.570240] env[61594]: ERROR nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. [ 703.570240] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.570240] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.570240] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.570240] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.570240] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.570240] env[61594]: ERROR nova.compute.manager raise self.value [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.570240] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 703.570240] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.570240] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 703.570632] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.570632] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 703.570632] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. 
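Each of these tracebacks bottoms out in nova.network.neutron._ensure_no_port_binding_failure: after updating the port, Nova inspects the binding Neutron returned and raises PortBindingFailed when the vif type came back as "binding_failed", which is why the message points at the neutron logs, where the actual binding error lives. The check amounts to the following simplified sketch, with the constant inlined rather than imported from nova.network.model:

    from nova import exception

    VIF_TYPE_BINDING_FAILED = 'binding_failed'   # value Neutron reports on a failed binding

    def _ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise exception.PortBindingFailed(port_id=port['id'])
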
[ 703.570632] env[61594]: ERROR nova.compute.manager [ 703.570632] env[61594]: Traceback (most recent call last): [ 703.570632] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 703.570632] env[61594]: listener.cb(fileno) [ 703.570632] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 703.570632] env[61594]: result = function(*args, **kwargs) [ 703.570632] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.570632] env[61594]: return func(*args, **kwargs) [ 703.570632] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 703.570632] env[61594]: raise e [ 703.570632] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.570632] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 703.570632] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.570632] env[61594]: created_port_ids = self._update_ports_for_instance( [ 703.570632] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.570632] env[61594]: with excutils.save_and_reraise_exception(): [ 703.570632] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.570632] env[61594]: self.force_reraise() [ 703.570632] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.570632] env[61594]: raise self.value [ 703.570632] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.570632] env[61594]: updated_port = self._update_port( [ 703.570632] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.570632] env[61594]: _ensure_no_port_binding_failure(port) [ 703.570632] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.570632] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 703.571223] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. [ 703.571223] env[61594]: Removing descriptor: 19 [ 703.572120] env[61594]: ERROR nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. 
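The "with excutils.save_and_reraise_exception():" frames in these tracebacks are oslo.utils' cleanup-then-reraise helper: entered from inside an except block, it remembers the in-flight exception, lets the body perform cleanup (in _update_ports_for_instance that means removing ports that were already created), and re-raises the original exception on exit unless the caller disables reraise. Typical shape, with an illustrative cleanup body rather than Nova's exact code:

    from oslo_utils import excutils

    def update_ports(neutron, created_port_ids, update_fn):
        try:
            update_fn()                      # the operation that may fail
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs first; the original exception is re-raised afterwards.
                for port_id in created_port_ids:
                    neutron.delete_port(port_id)
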
[ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Traceback (most recent call last): [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] yield resources [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.driver.spawn(context, instance, image_meta, [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] vm_ref = self.build_virtual_machine(instance, [ 703.572120] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] for vif in network_info: [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self._sync_wrapper(fn, *args, **kwargs) [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.wait() [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self[:] = self._gt.wait() [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self._exit_event.wait() [ 703.572412] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.572412] env[61594]: ERROR 
nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] result = hub.switch() [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self.greenlet.switch() [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] result = function(*args, **kwargs) [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return func(*args, **kwargs) [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise e [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] nwinfo = self.network_api.allocate_for_instance( [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] created_port_ids = self._update_ports_for_instance( [ 703.573070] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] with excutils.save_and_reraise_exception(): [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.force_reraise() [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise self.value [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] updated_port = self._update_port( [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.573367] 
env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] _ensure_no_port_binding_failure(port) [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise exception.PortBindingFailed(port_id=port['id']) [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. [ 703.573367] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] [ 703.573694] env[61594]: INFO nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Terminating instance [ 703.574114] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquiring lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.574114] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquired lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.574114] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 703.574535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 85c91e143ac64a92b6db095c69278907 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.581578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85c91e143ac64a92b6db095c69278907 [ 703.599041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 687191a1e3bc4b3b990e0d295340cd12 [ 703.601775] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg ca84e2696154419f803f41c56929d185 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.612699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca84e2696154419f803f41c56929d185 [ 703.635203] env[61594]: ERROR nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 
80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. [ 703.635203] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.635203] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.635203] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.635203] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.635203] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.635203] env[61594]: ERROR nova.compute.manager raise self.value [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.635203] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 703.635203] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.635203] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 703.635557] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.635557] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 703.635557] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. 
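The recurring "Expecting reply to msg ... in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455" / "Received RPC response" pairs are the AMQP driver logging synchronous RPC round trips: each call is tagged with a message id, and its reply comes back on the per-process reply queue created at startup. Reduced to the oslo.messaging client API, the calling side looks roughly like this; topic, version, method, and arguments are placeholders, not the real conductor RPC API:

    import oslo_messaging as messaging
    from oslo_config import cfg

    transport = messaging.get_rpc_transport(cfg.CONF)             # needs transport_url configured
    target = messaging.Target(topic='conductor', version='3.0')   # placeholder topic/version
    client = messaging.RPCClient(transport, target)

    # call() blocks until the reply lands on the reply_<uuid> queue;
    # cast() would be fire-and-forget with no reply expected.
    result = client.call({}, 'ping', payload='hello')             # placeholder method/args
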
[ 703.635557] env[61594]: ERROR nova.compute.manager [ 703.635557] env[61594]: Traceback (most recent call last): [ 703.635557] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 703.635557] env[61594]: listener.cb(fileno) [ 703.635557] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 703.635557] env[61594]: result = function(*args, **kwargs) [ 703.635557] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.635557] env[61594]: return func(*args, **kwargs) [ 703.635557] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 703.635557] env[61594]: raise e [ 703.635557] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.635557] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 703.635557] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.635557] env[61594]: created_port_ids = self._update_ports_for_instance( [ 703.635557] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.635557] env[61594]: with excutils.save_and_reraise_exception(): [ 703.635557] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.635557] env[61594]: self.force_reraise() [ 703.635557] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.635557] env[61594]: raise self.value [ 703.635557] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.635557] env[61594]: updated_port = self._update_port( [ 703.635557] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.635557] env[61594]: _ensure_no_port_binding_failure(port) [ 703.635557] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.635557] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 703.636158] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. [ 703.636158] env[61594]: Removing descriptor: 17 [ 703.636158] env[61594]: ERROR nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. 
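The "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.364s" lines throughout this section are oslo.concurrency's named-lock instrumentation; the per-instance build lock, the resource tracker's "compute_resources" lock, and the refresh_cache-<uuid> locks are all taken this way. The two usual forms, with lock names mirroring the log and placeholder bodies:

    from oslo_concurrency import lockutils

    # Decorator form: callers serialize on a named, process-local lock.
    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance):
        ...  # placeholder body

    # Context-manager form, as used for the refresh_cache-<uuid> locks above.
    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            ...  # placeholder body
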
[ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Traceback (most recent call last): [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] yield resources [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.driver.spawn(context, instance, image_meta, [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 703.636158] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] vm_ref = self.build_virtual_machine(instance, [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] vif_infos = vmwarevif.get_vif_info(self._session, [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] for vif in network_info: [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self._sync_wrapper(fn, *args, **kwargs) [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.wait() [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self[:] = self._gt.wait() [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self._exit_event.wait() [ 703.636415] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 703.636692] env[61594]: ERROR 
nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] result = hub.switch() [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self.greenlet.switch() [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] result = function(*args, **kwargs) [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return func(*args, **kwargs) [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise e [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] nwinfo = self.network_api.allocate_for_instance( [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 703.636692] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] created_port_ids = self._update_ports_for_instance( [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] with excutils.save_and_reraise_exception(): [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.force_reraise() [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise self.value [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] updated_port = self._update_port( [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 703.636959] 
env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] _ensure_no_port_binding_failure(port) [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 703.636959] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise exception.PortBindingFailed(port_id=port['id']) [ 703.637228] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. [ 703.637228] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] [ 703.637228] env[61594]: INFO nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Terminating instance [ 703.640975] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquiring lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 703.640975] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquired lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.640975] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 703.641077] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 8502a287fc8e448fa56e6f58d5d240fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.643983] env[61594]: DEBUG nova.network.neutron [-] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.644453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0696220c70ee4032930b53202d4fec92 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.650314] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8502a287fc8e448fa56e6f58d5d240fa [ 703.653296] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.658710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0696220c70ee4032930b53202d4fec92 [ 703.658710] env[61594]: DEBUG nova.network.neutron [-] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.661641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4a79dd1e13d74c368b94ccc85cfa4c1f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.677164] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a79dd1e13d74c368b94ccc85cfa4c1f [ 703.677854] env[61594]: INFO nova.compute.manager [-] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Took 0.11 seconds to deallocate network for instance. [ 703.680561] env[61594]: DEBUG nova.compute.claims [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 703.680561] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.691142] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.693693] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.793159] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ea0251-37b7-4aa6-8e0e-9e5461a2e8d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.802642] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73aaa59-d154-4464-a99f-51fb59c0dc40 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.832979] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44809ec3-bd36-400d-8029-ff1e058f1a9c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.840380] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2660b13-9ac5-4a9f-9de5-bda636d419c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.853762] env[61594]: DEBUG nova.compute.provider_tree [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.854326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 333476f5a49c4da8900c75d76a639160 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.864475] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 333476f5a49c4da8900c75d76a639160 [ 703.865468] env[61594]: DEBUG nova.scheduler.client.report [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 703.868151] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg c2c479c861784a5dbe22debff50494cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.880418] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.881163] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 23d29856273842779f72b08e5e9cae0f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.884408] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2c479c861784a5dbe22debff50494cd [ 703.884710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.347s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 703.885228] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 703.887044] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 80ea42c8d62e4e0aa1e924c27f83103b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.888079] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.350s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.889933] env[61594]: INFO nova.compute.claims [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.892830] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 639340ec6f0d48a09a796e7a1d9826f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.893458] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.893847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg b5612f97461548128ac3b15c6a2eb2d4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.894775] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23d29856273842779f72b08e5e9cae0f [ 703.895393] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Releasing lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.895746] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 703.895924] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.896406] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-efece023-c4b4-4e1e-a3b0-5c234e818f0e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.908636] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a9c4fd-4dc7-4acc-996f-7321b8c7b488 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.923084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5612f97461548128ac3b15c6a2eb2d4 [ 703.924246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Releasing lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.924246] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 703.924434] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 703.924975] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c5db38c-253f-4425-a4f1-79314cfae9bd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.932085] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bfa7d6f2-050a-4604-8f9c-250d89b091a5 could not be found. 
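The "Acquiring lock" / "Acquired lock" / "Releasing lock" records interleaved through this section (for "refresh_cache-<instance uuid>" and "compute_resources") come from oslo.concurrency's lockutils helpers, which also report how long each caller waited for and held a named lock. The sketch below shows the two usage patterns under the lock names taken from this log; it is illustrative only and not the wrapped Nova code itself.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_or_abort_resources():
        # Runs with the in-process "compute_resources" lock held; the decorator's
        # inner wrapper emits the "acquired by ... :: waited" and
        # "released by ... :: held" lines seen in this log.
        pass

    def refresh_instance_cache(instance_uuid):
        # Explicit context-manager form; lockutils.lock() logs the
        # "Acquiring lock" / "Acquired lock" / "Releasing lock" lines.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass

    claim_or_abort_resources()
    refresh_instance_cache('bfa7d6f2-050a-4604-8f9c-250d89b091a5')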
[ 703.932085] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.932264] env[61594]: INFO nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 703.932562] env[61594]: DEBUG oslo.service.loopingcall [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.933085] env[61594]: DEBUG nova.compute.manager [-] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 703.933188] env[61594]: DEBUG nova.network.neutron [-] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 703.937563] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80ea42c8d62e4e0aa1e924c27f83103b [ 703.939039] env[61594]: DEBUG nova.compute.utils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.939039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg aea76e4228984d7ca1326254e81cc2d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.941878] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5c91f6-89e3-4d93-a2fa-2bc173a7572b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.955664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 639340ec6f0d48a09a796e7a1d9826f5 [ 703.955664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aea76e4228984d7ca1326254e81cc2d7 [ 703.955664] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 703.955807] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.959604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 3a81919bce7e4138872a71c61004cf54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.960248] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 703.961886] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 8dc09d769377497f9158ea8a02a7353c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.967021] env[61594]: DEBUG nova.network.neutron [-] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.967021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a682ae0327bf498db7d41cdec7847d43 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 703.977557] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fc403a94-8ac7-4f73-a0a8-51539dcc47d4 could not be found. [ 703.977796] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 703.977990] env[61594]: INFO nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Took 0.05 seconds to destroy the instance on the hypervisor. [ 703.978235] env[61594]: DEBUG oslo.service.loopingcall [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 703.978729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a81919bce7e4138872a71c61004cf54 [ 703.979264] env[61594]: DEBUG nova.compute.manager [-] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 703.979357] env[61594]: DEBUG nova.network.neutron [-] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 704.004013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a682ae0327bf498db7d41cdec7847d43 [ 704.004726] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dc09d769377497f9158ea8a02a7353c [ 704.005453] env[61594]: DEBUG nova.network.neutron [-] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.005941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg dc7bf3815f184caf85278dd5fc00875e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.009502] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 13423a901cc84c4cb33e7d66cf66662b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.016963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc7bf3815f184caf85278dd5fc00875e [ 704.017430] env[61594]: INFO nova.compute.manager [-] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Took 0.08 seconds to deallocate network for instance. [ 704.020218] env[61594]: DEBUG nova.compute.claims [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 704.020770] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.037529] env[61594]: DEBUG nova.network.neutron [-] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.038404] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 90472fe79e244b9c89da8915ff2a7fbe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.052452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13423a901cc84c4cb33e7d66cf66662b [ 704.052897] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90472fe79e244b9c89da8915ff2a7fbe [ 704.056266] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 704.058716] env[61594]: DEBUG nova.network.neutron [-] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.059966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0596729f0b724d3ab243fb3731b262b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.069660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0596729f0b724d3ab243fb3731b262b4 [ 704.070319] env[61594]: INFO nova.compute.manager [-] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Took 0.09 seconds to deallocate network for instance. [ 704.074319] env[61594]: DEBUG nova.compute.claims [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 704.074319] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 704.093627] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.093994] env[61594]: DEBUG nova.virt.hardware [None 
req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.094067] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.094229] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.094378] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.094526] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.094734] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.095046] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.095102] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.095271] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.095444] env[61594]: DEBUG nova.virt.hardware [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.096629] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-feb8abaa-72f5-4c59-abd9-9b1efd537ea6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.108204] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c25f66a-cb1c-4d45-94b6-2c0a091d2978 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.116314] env[61594]: DEBUG nova.policy [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35d0551ce6c241a2b5db99a60c6ce94b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f01800739b6848ce82622704b8ab4b2a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 704.176917] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d19e4ce1-4b4d-4cac-a568-9cc319fbfa38 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.184563] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193e8e93-7d42-4d46-82c2-7a8839f60f8f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.215470] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006c95e0-8cd5-40e7-9a76-9e672be757a6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.223133] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681be89c-f016-4b07-a208-e175e7e8ecd3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.238956] env[61594]: DEBUG nova.compute.provider_tree [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.239487] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 258a49009a4c435d88703fd4fb6feda7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.253720] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 258a49009a4c435d88703fd4fb6feda7 [ 704.254838] env[61594]: DEBUG nova.scheduler.client.report [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 704.257437] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 4676575ef12e4165ac43b25f16f83396 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.274885] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4676575ef12e4165ac43b25f16f83396 [ 704.275950] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.388s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.276587] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 704.278603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 17deea8136ca401cbd1470937e5294f8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.279594] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.599s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.281589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 71b45ce4e2ba4606be369f7f9960a0c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.331483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71b45ce4e2ba4606be369f7f9960a0c0 [ 704.336931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17deea8136ca401cbd1470937e5294f8 [ 704.337389] env[61594]: DEBUG nova.compute.utils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 704.337691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 225f13898bc54a11bc8d6ee8b2b4292d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.338541] env[61594]: DEBUG nova.compute.manager [None 
req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 704.339364] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 704.352582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 225f13898bc54a11bc8d6ee8b2b4292d [ 704.353166] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 704.354987] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 7c99ced053b543718f42f8350912e856 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.405586] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c99ced053b543718f42f8350912e856 [ 704.409661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg ad409f3d057a46eea038fa4cb1d56b0d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.424534] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.425055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 9ae505f773594775b0fd54a1068dff4c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.436901] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ae505f773594775b0fd54a1068dff4c [ 704.436901] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 704.436901] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 704.436901] env[61594]: DEBUG nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 704.437194] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 704.471039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad409f3d057a46eea038fa4cb1d56b0d [ 704.471802] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 704.504202] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.504472] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.504639] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.504828] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.504972] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 704.505421] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.505674] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.505851] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.506032] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.506255] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.506388] env[61594]: DEBUG nova.virt.hardware [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.507312] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0b707c-8c2d-415f-97e6-abc4d1f0c52f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.512375] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51908810-1daa-4ee3-907e-905bf2294205 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.521997] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f791d182-eae4-43f0-b502-99cc8881134d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.526779] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69307b9-ee22-495c-90a6-01f826ddb404 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.570180] env[61594]: DEBUG nova.policy [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'93fabaa5ad29483d870b131eac8d2677', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b1a43347e40844d4a9c866aeabeb5470', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 704.572495] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c7f40d-6ca4-4eb3-9a93-21fe797bf784 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.580431] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60c498a-1e3b-43c4-aeae-3caa2f3abf47 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.594689] env[61594]: DEBUG nova.compute.provider_tree [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.594793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg ff3932a04675406589ec58c67f773af3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.604386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff3932a04675406589ec58c67f773af3 [ 704.605359] env[61594]: DEBUG nova.scheduler.client.report [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 704.607872] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg c043a6bf275840a3ac97e8aa080fe891 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.626681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c043a6bf275840a3ac97e8aa080fe891 [ 704.627120] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.348s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.627733] env[61594]: ERROR nova.compute.manager [None 
req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Traceback (most recent call last): [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.driver.spawn(context, instance, image_meta, [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] vm_ref = self.build_virtual_machine(instance, [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.627733] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] for vif in network_info: [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self._sync_wrapper(fn, *args, **kwargs) [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.wait() [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self[:] = self._gt.wait() [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self._exit_event.wait() [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] result = hub.switch() [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.628127] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return self.greenlet.switch() [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] result = function(*args, **kwargs) [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] return func(*args, **kwargs) [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise e [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] nwinfo = self.network_api.allocate_for_instance( [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] created_port_ids = self._update_ports_for_instance( [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] with excutils.save_and_reraise_exception(): [ 704.628570] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] self.force_reraise() [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise self.value [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] updated_port = self._update_port( [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: 
e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] _ensure_no_port_binding_failure(port) [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] raise exception.PortBindingFailed(port_id=port['id']) [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] nova.exception.PortBindingFailed: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. [ 704.628865] env[61594]: ERROR nova.compute.manager [instance: e42594f1-7bf7-4630-9ebf-950007812a14] [ 704.629137] env[61594]: DEBUG nova.compute.utils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.629801] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.609s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.631818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 3ecd29f85bee489194b921b9c2045059 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.633166] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Build of instance e42594f1-7bf7-4630-9ebf-950007812a14 was re-scheduled: Binding failed for port 24b937ea-bba2-4255-a685-87739ea2b89e, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 704.633623] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 704.633849] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquiring lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.633966] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Acquired lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.634141] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.634500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 475382948510450d982ef62fd708fb5d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.643853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 475382948510450d982ef62fd708fb5d [ 704.673107] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ecd29f85bee489194b921b9c2045059 [ 704.697655] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.772202] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.772795] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8dfcb36ee4864fddbc3cc440d3dcc97f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.781856] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dfcb36ee4864fddbc3cc440d3dcc97f [ 704.782612] env[61594]: DEBUG nova.network.neutron [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.783248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 2863633b39954f608ab8687625a71f76 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.795728] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2863633b39954f608ab8687625a71f76 [ 704.796591] env[61594]: INFO nova.compute.manager [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba] Took 0.36 seconds to deallocate network for instance. [ 704.798091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d11de87a0fb04517852acc4a5a95d0f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.831779] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793bb883-9765-4af4-8250-29a4d55bb3fa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.841010] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050a2579-ef33-4b52-9941-524dfe1a70d2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.878362] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d11de87a0fb04517852acc4a5a95d0f7 [ 704.881777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d13c35f0fdfc40949e9aa76faf668bf3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.883055] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884d2051-021b-4a82-b830-754f7626e46b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.891606] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3314e976-944d-4566-9449-e0dc48910d09 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.909019] env[61594]: DEBUG nova.compute.provider_tree [None 
req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.909019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg ceed04a3f0e64616906b017e5a9644fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.915631] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ceed04a3f0e64616906b017e5a9644fc [ 704.916582] env[61594]: DEBUG nova.scheduler.client.report [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 704.918823] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 6e6055308ed24981962598bc1695965a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.922709] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d13c35f0fdfc40949e9aa76faf668bf3 [ 704.935606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e6055308ed24981962598bc1695965a [ 704.936535] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.937151] env[61594]: ERROR nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. 
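
Every one of the build failures recorded above follows the same shape: Neutron returns a port whose binding came back failed, and the check at nova/network/neutron.py line 294 in these tracebacks converts that into the PortBindingFailed exception that unwinds through _update_ports_for_instance, aborts the resource claim, and triggers the "was re-scheduled" records that follow. The snippet below is a minimal, self-contained sketch of that check, not a verbatim copy of the Nova source; the port dict, the constant, and the exception class are reduced to the pieces the log messages actually show.

    # Simplified sketch of the port-binding check behind the tracebacks above.
    # Assumption: the port is a plain dict as returned by the Neutron API.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id

    def ensure_no_port_binding_failure(port):
        """Raise PortBindingFailed if Neutron reported a failed binding."""
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        # A successfully bound port passes silently; a failed binding raises
        # the same message seen in the ERROR records above.
        ensure_no_port_binding_failure(
            {'id': 'ok-port', 'binding:vif_type': 'ovs'})
        try:
            ensure_no_port_binding_failure(
                {'id': '75516f8b-27d7-431d-ac1d-969b78af4c51',
                 'binding:vif_type': VIF_TYPE_BINDING_FAILED})
        except PortBindingFailed as exc:
            print(exc)

Once this exception reaches _do_build_and_run_instance, the claim is aborted and the instance is re-scheduled, which is why each PortBindingFailed above is immediately followed by an abort_instance_claim lock cycle and a "was re-scheduled" record.
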
[ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Traceback (most recent call last): [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.driver.spawn(context, instance, image_meta, [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] vm_ref = self.build_virtual_machine(instance, [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.937151] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] for vif in network_info: [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self._sync_wrapper(fn, *args, **kwargs) [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.wait() [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self[:] = self._gt.wait() [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self._exit_event.wait() [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] result = hub.switch() [ 704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
704.937448] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return self.greenlet.switch() [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] result = function(*args, **kwargs) [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] return func(*args, **kwargs) [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise e [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] nwinfo = self.network_api.allocate_for_instance( [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] created_port_ids = self._update_ports_for_instance( [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] with excutils.save_and_reraise_exception(): [ 704.937743] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] self.force_reraise() [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise self.value [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] updated_port = self._update_port( [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] _ensure_no_port_binding_failure(port) [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] raise exception.PortBindingFailed(port_id=port['id']) [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] nova.exception.PortBindingFailed: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. [ 704.938043] env[61594]: ERROR nova.compute.manager [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] [ 704.938271] env[61594]: DEBUG nova.compute.utils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.938966] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.865s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.941435] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg f551de9a8aa74d7a8e540681c9273172 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.942661] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Build of instance bfa7d6f2-050a-4604-8f9c-250d89b091a5 was re-scheduled: Binding failed for port 75516f8b-27d7-431d-ac1d-969b78af4c51, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 704.943122] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 704.943347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquiring lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.943494] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Acquired lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.943651] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.944068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg ed3cdb93a527422797d05584f518fc3e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.953510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed3cdb93a527422797d05584f518fc3e [ 704.954738] env[61594]: INFO nova.scheduler.client.report [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba [ 704.963259] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg bb34cf5a739843d8907c6cb92dd34016 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 704.977977] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb34cf5a739843d8907c6cb92dd34016 [ 704.978553] env[61594]: DEBUG oslo_concurrency.lockutils [None req-1ee23a03-1461-4cd6-ac45-deb0dcdb6386 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "ec1dcbbc-16df-4965-a61e-6c70e4c6c0ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.904s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.003988] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f551de9a8aa74d7a8e540681c9273172 [ 705.083098] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] 
Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.155075] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e5eaf08-48b2-4a72-8af5-210aefd2c810 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.163252] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b35e674e-b891-410d-8741-70bec07edea0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.194614] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23306aae-c51d-4ac7-99a9-29492277582e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.202700] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9946e3b1-c218-493c-a38d-550b370269f1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.217980] env[61594]: DEBUG nova.compute.provider_tree [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.218489] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 87012b6ee90d4bb283fc74135835e50c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.229179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87012b6ee90d4bb283fc74135835e50c [ 705.230223] env[61594]: DEBUG nova.scheduler.client.report [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 705.232698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 4271f5392eb541cc8f69b348ba1b42fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.249371] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4271f5392eb541cc8f69b348ba1b42fc [ 705.250242] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.250868] env[61594]: ERROR nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Traceback (most recent call last): [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.driver.spawn(context, instance, image_meta, [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] vm_ref = self.build_virtual_machine(instance, [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] vif_infos = vmwarevif.get_vif_info(self._session, [ 705.250868] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] for vif in network_info: [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self._sync_wrapper(fn, *args, **kwargs) [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.wait() [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self[:] = self._gt.wait() [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 705.251246] env[61594]: ERROR 
nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self._exit_event.wait() [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] result = hub.switch() [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 705.251246] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return self.greenlet.switch() [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] result = function(*args, **kwargs) [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] return func(*args, **kwargs) [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise e [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] nwinfo = self.network_api.allocate_for_instance( [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] created_port_ids = self._update_ports_for_instance( [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] with excutils.save_and_reraise_exception(): [ 705.251640] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] self.force_reraise() [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise self.value [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance 
[ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] updated_port = self._update_port( [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] _ensure_no_port_binding_failure(port) [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] raise exception.PortBindingFailed(port_id=port['id']) [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] nova.exception.PortBindingFailed: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. [ 705.252342] env[61594]: ERROR nova.compute.manager [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] [ 705.255731] env[61594]: DEBUG nova.compute.utils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 705.255731] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Build of instance fc403a94-8ac7-4f73-a0a8-51539dcc47d4 was re-scheduled: Binding failed for port 80d114f8-4d5f-41a9-b5ab-bb2d668a8121, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 705.256012] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 705.256264] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquiring lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.256431] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Acquired lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.256593] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.256995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 8dce01bbe32548e8b4c1aa070bd7dcb1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.267841] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8dce01bbe32548e8b4c1aa070bd7dcb1 [ 705.340180] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.428285] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.428830] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 54d581732f454fbb983f47a3c8a1212a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.440575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54d581732f454fbb983f47a3c8a1212a [ 705.441363] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Releasing lock "refresh_cache-e42594f1-7bf7-4630-9ebf-950007812a14" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.441592] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 705.441785] env[61594]: DEBUG nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 705.441965] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.527562] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.528595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg b384406c9e7941598ccf1aa3590e078a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.538776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b384406c9e7941598ccf1aa3590e078a [ 705.538849] env[61594]: DEBUG nova.network.neutron [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.540141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg f1e4bdad806049b7bf69a88167c2c1a3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.551461] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1e4bdad806049b7bf69a88167c2c1a3 [ 705.551461] env[61594]: INFO nova.compute.manager [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] [instance: e42594f1-7bf7-4630-9ebf-950007812a14] Took 0.11 seconds to deallocate network for instance. [ 705.553535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg b7dbb13f21dd4410815a9b533755b138 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.600972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7dbb13f21dd4410815a9b533755b138 [ 705.604054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 20f4e1accf3844e7b833b1e6b2172ead in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.605867] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.606325] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg c5e662fe85c647fe93740310e787e0d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.615824] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5e662fe85c647fe93740310e787e0d8 [ 705.616369] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Releasing lock "refresh_cache-bfa7d6f2-050a-4604-8f9c-250d89b091a5" 
{{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.616577] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 705.616743] env[61594]: DEBUG nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 705.616910] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.636019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20f4e1accf3844e7b833b1e6b2172ead [ 705.661759] env[61594]: INFO nova.scheduler.client.report [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Deleted allocations for instance e42594f1-7bf7-4630-9ebf-950007812a14 [ 705.668346] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Expecting reply to msg 612d712193734d5fba27dc8fefe423fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.685280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 612d712193734d5fba27dc8fefe423fc [ 705.686258] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c58d93f3-90fc-409d-9c3a-8bc529b434dd tempest-ServerAddressesTestJSON-475267544 tempest-ServerAddressesTestJSON-475267544-project-member] Lock "e42594f1-7bf7-4630-9ebf-950007812a14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.773s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 705.740306] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.740816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 2c86f2796507435aa1a735766b5b6fde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.753440] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c86f2796507435aa1a735766b5b6fde [ 705.753440] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 
tempest-ServerExternalEventsTest-1272541702-project-member] Releasing lock "refresh_cache-fc403a94-8ac7-4f73-a0a8-51539dcc47d4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.753440] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 705.753440] env[61594]: DEBUG nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 705.753440] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.810544] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.811540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg b2184e1c67f344fc84564138ac699bd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.818830] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2184e1c67f344fc84564138ac699bd1 [ 705.819329] env[61594]: DEBUG nova.network.neutron [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.819809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 3b07f739c00345c1b093a73c5a6ee036 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.829712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b07f739c00345c1b093a73c5a6ee036 [ 705.830307] env[61594]: INFO nova.compute.manager [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] [instance: fc403a94-8ac7-4f73-a0a8-51539dcc47d4] Took 0.08 seconds to deallocate network for instance. 
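
The lockutils DEBUG records that recur throughout this section ("Lock ... acquired ... waited N s", "... 'released' ... held N s") are produced by a timing wrapper around an ordinary lock: wait time is measured from the acquire attempt to acquisition, held time from acquisition to release. The sketch below is only a rough stand-in for that pattern, using a plain threading.Lock and print rather than oslo.concurrency's fair/external locks and its logger; the names are illustrative.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, owner):
        """Log wait and hold times around a lock, mimicking the lockutils
        DEBUG records above (simplified; not the oslo.concurrency code)."""
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print(f'Lock "{name}" acquired by "{owner}" :: '
              f'waited {acquired - start:.3f}s')
        try:
            yield
        finally:
            released = time.monotonic()
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: '
                  f'held {released - acquired:.3f}s')

    if __name__ == '__main__':
        compute_resources = threading.Lock()
        with timed_lock(compute_resources, "compute_resources",
                        "ResourceTracker.abort_instance_claim"):
            time.sleep(0.3)  # stands in for returning the aborted allocation

The held times reported above (for example 0.311s and 0.307s around abort_instance_claim, or 14.760s for the per-instance build lock) are exactly this acquisition-to-release interval, so long holds on "compute_resources" directly delay every other build waiting on the same resource tracker.
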
[ 705.833131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg a9ddeba4ee8948369f30ba71e5f2aa36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.875179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9ddeba4ee8948369f30ba71e5f2aa36 [ 705.879041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg b4d89eff428a4a68ae66cbda615af58c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.891541] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Successfully created port: 6f14da6b-a787-45eb-a5ad-99eec27448be {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 705.927400] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4d89eff428a4a68ae66cbda615af58c [ 705.940907] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.942172] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 9031f061ec6941619331097fb83b353b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.951516] env[61594]: INFO nova.scheduler.client.report [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Deleted allocations for instance fc403a94-8ac7-4f73-a0a8-51539dcc47d4 [ 705.958958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9031f061ec6941619331097fb83b353b [ 705.959761] env[61594]: DEBUG nova.network.neutron [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.960242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 8b5e50b493bb4c36aa7786f1ba6f4eb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.961306] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Expecting reply to msg 567f68c5e7dd47608fabc2f8e73c6c1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.969816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
8b5e50b493bb4c36aa7786f1ba6f4eb4 [ 705.970705] env[61594]: INFO nova.compute.manager [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] [instance: bfa7d6f2-050a-4604-8f9c-250d89b091a5] Took 0.35 seconds to deallocate network for instance. [ 705.972190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg e5c2d253dc394e4cafcfd1724ac72b07 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 705.985806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 567f68c5e7dd47608fabc2f8e73c6c1a [ 705.985929] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6a1c52e1-25c8-4ed4-ba72-ee6743bfc1d9 tempest-ServerExternalEventsTest-1272541702 tempest-ServerExternalEventsTest-1272541702-project-member] Lock "fc403a94-8ac7-4f73-a0a8-51539dcc47d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.760s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.019599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5c2d253dc394e4cafcfd1724ac72b07 [ 706.025549] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 5f874c4e1b0e40068b900c6d837c201e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 706.059379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f874c4e1b0e40068b900c6d837c201e [ 706.090028] env[61594]: INFO nova.scheduler.client.report [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Deleted allocations for instance bfa7d6f2-050a-4604-8f9c-250d89b091a5 [ 706.099025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Expecting reply to msg 89c88938bb7a4c83a935d953c6473aa7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 706.113493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89c88938bb7a4c83a935d953c6473aa7 [ 706.114180] env[61594]: DEBUG oslo_concurrency.lockutils [None req-708a8604-ec99-4065-8bc7-e241f7063359 tempest-TenantUsagesTestJSON-507762357 tempest-TenantUsagesTestJSON-507762357-project-member] Lock "bfa7d6f2-050a-4604-8f9c-250d89b091a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.917s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.744063] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Successfully created port: aeedbef6-9d53-4e69-9e27-8281b1f12959 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.264033] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e 
tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "8f3227ba-f30a-4725-94d9-ac1d5f1f16de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.265812] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "8f3227ba-f30a-4725-94d9-ac1d5f1f16de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.268177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 12589bb3d4e445d8a4a09af32419e574 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.284026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12589bb3d4e445d8a4a09af32419e574 [ 709.284026] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 709.287143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg e1cf1b072ebb4344a1e765f210e73fde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.348984] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1cf1b072ebb4344a1e765f210e73fde [ 709.384567] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.384839] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 709.386389] env[61594]: INFO nova.compute.claims [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.389439] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg dc70dc9303674e4aa3f7d66c0324f317 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.435097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc70dc9303674e4aa3f7d66c0324f317 [ 709.437360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 0b0ee7021ab4491985c18728606d2cd5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.448684] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b0ee7021ab4491985c18728606d2cd5 [ 709.558126] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c277beed-bd8e-4b19-b084-cdefaf661f3c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.568095] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabb7800-302f-4b30-abd4-70cd18369dcd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.606277] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5434521b-15e0-4be3-b05a-c715c6d96215 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.616651] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e1e49b-28ec-44e9-9509-321dcda5457a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.630913] env[61594]: DEBUG nova.compute.provider_tree [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.631486] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 755a34bd65854a5abf4ae896b7db69dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.641656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 755a34bd65854a5abf4ae896b7db69dc [ 709.643064] env[61594]: DEBUG nova.scheduler.client.report [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 709.650866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 
3d9ca4c5e1f5486298aedc420000f40e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.662859] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d9ca4c5e1f5486298aedc420000f40e [ 709.664014] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 709.665677] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 709.666523] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b656d6d0164c44f8a806282f4b8123b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.719397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b656d6d0164c44f8a806282f4b8123b1 [ 709.721769] env[61594]: DEBUG nova.compute.utils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.721769] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg c1cd7065b5ba4723af059370e96ad98e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.726420] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 709.727370] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 709.750275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1cd7065b5ba4723af059370e96ad98e [ 709.750455] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 709.752545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg eaa869c8381843c3a3a9fcc60fe57d98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.818783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaa869c8381843c3a3a9fcc60fe57d98 [ 709.822033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 0dea1d3d1d3f4bdca674dd9f2b545e74 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 709.857747] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dea1d3d1d3f4bdca674dd9f2b545e74 [ 709.858900] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 709.902197] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 709.903406] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 709.903406] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.903406] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 709.903406] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 
tempest-ServerDiskConfigTestJSON-779666606-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.907247] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 709.907247] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 709.907247] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 709.907247] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 709.907247] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 709.907492] env[61594]: DEBUG nova.virt.hardware [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 709.909050] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e6e6a4-e7be-487e-a9ba-73fa6ec7b060 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.926891] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cfde07-81be-4950-9746-a1d65cc1611f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.145466] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "b16d0a82-271e-4e37-bfcd-49c3749d16ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.145573] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock 
"b16d0a82-271e-4e37-bfcd-49c3749d16ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.146109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg dd3a8f19c4c548189a573d0900ddb72e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.158053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd3a8f19c4c548189a573d0900ddb72e [ 710.158572] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 710.161124] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg db4c683b401349e8bb17c7fbfd667f71 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.214285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db4c683b401349e8bb17c7fbfd667f71 [ 710.229804] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.230166] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.232406] env[61594]: INFO nova.compute.claims [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.233396] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg b85e0d71fbe843249502aa430ac12a9e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.256114] env[61594]: DEBUG nova.policy [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee5a21ff43314c1a857f6958056173f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afc5e909ec5c4dd983ece5aa3236910f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 710.276257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b85e0d71fbe843249502aa430ac12a9e [ 710.278200] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 31f16e95bba74508a3a2263988fc91a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.294189] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31f16e95bba74508a3a2263988fc91a5 [ 710.458231] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a77afc7-4163-4e8e-993b-f81240bc69bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.470455] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571c452b-c6e9-4d1f-abac-66612c945587 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.503730] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d907993f-e17e-47b2-8599-606b1d8d993e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.511526] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0f9aa6-49e3-419e-afc0-1d1498826085 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.525359] env[61594]: DEBUG nova.compute.provider_tree [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.526109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg d17d239323514369a6a836441d6ff96f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.535588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d17d239323514369a6a836441d6ff96f [ 710.536804] env[61594]: DEBUG nova.scheduler.client.report [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 710.539473] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 
tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 412a9079bdc947e2b59ec4400b64f089 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.550835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 412a9079bdc947e2b59ec4400b64f089 [ 710.551602] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.322s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.552080] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 710.553684] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg ac78f38e5f1549b8925d11406687c791 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.588482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac78f38e5f1549b8925d11406687c791 [ 710.590071] env[61594]: DEBUG nova.compute.utils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.590672] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg d4ba12f964b64e67aaa4ff60ec17c227 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.592390] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 710.592568] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.605225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4ba12f964b64e67aaa4ff60ec17c227 [ 710.605225] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 710.605561] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 5ba26c5cdcb446e391f95b6bab563ce6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.635731] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba26c5cdcb446e391f95b6bab563ce6 [ 710.639545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 85a404fc40b34489873d7e72dbb94e08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 710.672642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85a404fc40b34489873d7e72dbb94e08 [ 710.674267] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 710.708456] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.708702] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.708889] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.709467] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.709662] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Image pref 0:0:0 {{(pid=61594) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.709855] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.710113] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.710328] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 710.710558] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.711279] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.711347] env[61594]: DEBUG nova.virt.hardware [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.712462] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0b785a-8a47-4ce6-89a8-3d30d197f89e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.724058] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41ecc97-04fd-4fea-8880-abac0b84c6f6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.976268] env[61594]: DEBUG nova.policy [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb9e86398ee44b7b9e06dcdc29f8ff6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05666a0abf0b433184c14ff43e6e82c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 711.414447] env[61594]: ERROR nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c 
tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. [ 711.414447] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.414447] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.414447] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.414447] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.414447] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.414447] env[61594]: ERROR nova.compute.manager raise self.value [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.414447] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.414447] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.414447] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.415121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.415121] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.415121] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. 
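[editor's note] The traceback above ends in _ensure_no_port_binding_failure (nova/network/neutron.py:294) raising PortBindingFailed for port 87352393-9957-4fc9-a5bc-009618ab2125. A minimal sketch of that check follows, assuming the usual Neutron convention that a port which could not be bound comes back with 'binding:vif_type' set to 'binding_failed'; this is an approximation for reading the log, not the actual Nova source.

```python
# Sketch of the binding-failure check that produced the error above.
# Assumption: Neutron marks an unbindable port with
# port['binding:vif_type'] == 'binding_failed'.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reports that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example with a port dict shaped like a failed Neutron binding:
try:
    _ensure_no_port_binding_failure(
        {'id': '87352393-9957-4fc9-a5bc-009618ab2125',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)
```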
[ 711.415121] env[61594]: ERROR nova.compute.manager [ 711.415121] env[61594]: Traceback (most recent call last): [ 711.415121] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.415121] env[61594]: listener.cb(fileno) [ 711.415121] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 711.415121] env[61594]: result = function(*args, **kwargs) [ 711.415121] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.415121] env[61594]: return func(*args, **kwargs) [ 711.415121] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 711.415121] env[61594]: raise e [ 711.415121] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.415121] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 711.415121] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.415121] env[61594]: created_port_ids = self._update_ports_for_instance( [ 711.415121] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.415121] env[61594]: with excutils.save_and_reraise_exception(): [ 711.415121] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.415121] env[61594]: self.force_reraise() [ 711.415121] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.415121] env[61594]: raise self.value [ 711.415121] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.415121] env[61594]: updated_port = self._update_port( [ 711.415121] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.415121] env[61594]: _ensure_no_port_binding_failure(port) [ 711.415121] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.415121] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.415813] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. [ 711.415813] env[61594]: Removing descriptor: 22 [ 711.415813] env[61594]: ERROR nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. 
[ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Traceback (most recent call last): [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] yield resources [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.driver.spawn(context, instance, image_meta, [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.415813] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] vm_ref = self.build_virtual_machine(instance, [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] for vif in network_info: [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self._sync_wrapper(fn, *args, **kwargs) [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.wait() [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self[:] = self._gt.wait() [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self._exit_event.wait() [ 711.416160] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.416467] env[61594]: ERROR 
nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] result = hub.switch() [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self.greenlet.switch() [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] result = function(*args, **kwargs) [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return func(*args, **kwargs) [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise e [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] nwinfo = self.network_api.allocate_for_instance( [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.416467] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] created_port_ids = self._update_ports_for_instance( [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] with excutils.save_and_reraise_exception(): [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.force_reraise() [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise self.value [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] updated_port = self._update_port( [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.416859] 
env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] _ensure_no_port_binding_failure(port) [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.416859] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise exception.PortBindingFailed(port_id=port['id']) [ 711.417155] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. [ 711.417155] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] [ 711.417155] env[61594]: INFO nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Terminating instance [ 711.418659] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.418826] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquired lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.418999] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.419453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 0878296038764638b568a1f369b7e2f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 711.428595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0878296038764638b568a1f369b7e2f2 [ 711.493950] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.616247] env[61594]: WARNING oslo_vmware.rw_handles [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 711.616247] env[61594]: ERROR oslo_vmware.rw_handles [ 711.616661] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 711.619523] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 711.619523] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Copying Virtual Disk [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/7ea4a3c0-8735-447b-a248-3616e7487c3a/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 711.619523] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00a3f64d-700d-4024-ad50-e343de4e7041 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.628571] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] 
Waiting for the task: (returnval){ [ 711.628571] env[61594]: value = "task-1291383" [ 711.628571] env[61594]: _type = "Task" [ 711.628571] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.638977] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Task: {'id': task-1291383, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.811308] env[61594]: ERROR nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. [ 711.811308] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.811308] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.811308] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.811308] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.811308] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.811308] env[61594]: ERROR nova.compute.manager raise self.value [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.811308] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.811308] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.811308] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.811971] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.811971] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.811971] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. 
[ 711.811971] env[61594]: ERROR nova.compute.manager [ 711.811971] env[61594]: Traceback (most recent call last): [ 711.811971] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.811971] env[61594]: listener.cb(fileno) [ 711.811971] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 711.811971] env[61594]: result = function(*args, **kwargs) [ 711.811971] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.811971] env[61594]: return func(*args, **kwargs) [ 711.811971] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 711.811971] env[61594]: raise e [ 711.811971] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.811971] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 711.811971] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.811971] env[61594]: created_port_ids = self._update_ports_for_instance( [ 711.811971] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.811971] env[61594]: with excutils.save_and_reraise_exception(): [ 711.811971] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.811971] env[61594]: self.force_reraise() [ 711.811971] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.811971] env[61594]: raise self.value [ 711.811971] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.811971] env[61594]: updated_port = self._update_port( [ 711.811971] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.811971] env[61594]: _ensure_no_port_binding_failure(port) [ 711.811971] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.811971] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.813135] env[61594]: nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. [ 711.813135] env[61594]: Removing descriptor: 24 [ 711.813135] env[61594]: ERROR nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. 
[ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Traceback (most recent call last): [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] yield resources [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.driver.spawn(context, instance, image_meta, [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.813135] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] vm_ref = self.build_virtual_machine(instance, [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] for vif in network_info: [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self._sync_wrapper(fn, *args, **kwargs) [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.wait() [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self[:] = self._gt.wait() [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self._exit_event.wait() [ 711.813724] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.814593] env[61594]: ERROR 
nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] result = hub.switch() [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self.greenlet.switch() [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] result = function(*args, **kwargs) [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return func(*args, **kwargs) [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise e [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] nwinfo = self.network_api.allocate_for_instance( [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.814593] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] created_port_ids = self._update_ports_for_instance( [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] with excutils.save_and_reraise_exception(): [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.force_reraise() [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise self.value [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] updated_port = self._update_port( [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.815126] 
env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] _ensure_no_port_binding_failure(port) [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.815126] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise exception.PortBindingFailed(port_id=port['id']) [ 711.815596] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. [ 711.815596] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] [ 711.815596] env[61594]: INFO nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Terminating instance [ 711.815596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.815596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquired lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.815596] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.816088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 295427b33c4e4f95b99e14323b317b98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 711.825571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 295427b33c4e4f95b99e14323b317b98 [ 711.939575] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.081546] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.081860] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg db6178ca6dc348afbc544beb2c9234a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.092769] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db6178ca6dc348afbc544beb2c9234a2 [ 712.093403] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Releasing lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.093792] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 712.093989] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 712.094510] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9af16554-e437-408e-845a-ae310b47aaf1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.107597] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d821eee-404e-4e1e-bbb3-323e1bba9d01 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.132652] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance deb53e97-7f20-47d3-a069-0e435776bad8 could not be found. 
[ 712.132891] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 712.133092] env[61594]: INFO nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 712.133352] env[61594]: DEBUG oslo.service.loopingcall [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.140810] env[61594]: DEBUG nova.compute.manager [-] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 712.141009] env[61594]: DEBUG nova.network.neutron [-] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 712.148560] env[61594]: DEBUG oslo_vmware.exceptions [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 712.148709] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.149257] env[61594]: ERROR nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.149257] env[61594]: Faults: ['InvalidArgument'] [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Traceback (most recent call last): [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] yield resources [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self.driver.spawn(context, instance, image_meta, [ 712.149257] env[61594]: 
ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self._fetch_image_if_missing(context, vi) [ 712.149257] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] image_cache(vi, tmp_image_ds_loc) [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] vm_util.copy_virtual_disk( [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] session._wait_for_task(vmdk_copy_task) [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return self.wait_for_task(task_ref) [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return evt.wait() [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] result = hub.switch() [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 712.149589] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return self.greenlet.switch() [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self.f(*self.args, **self.kw) [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 
151fefe2-b70a-4ea5-8b50-08c7968b10fe] raise exceptions.translate_fault(task_info.error) [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Faults: ['InvalidArgument'] [ 712.149957] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] [ 712.149957] env[61594]: INFO nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Terminating instance [ 712.151902] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.152080] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquired lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.152337] env[61594]: DEBUG nova.network.neutron [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 712.152792] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg a1b9d956b0e242ba8cc32245f3abb47b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.164916] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1b9d956b0e242ba8cc32245f3abb47b [ 712.221701] env[61594]: DEBUG nova.network.neutron [-] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.222356] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f4c1eaa76f954818a6f076c134420d1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.231789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4c1eaa76f954818a6f076c134420d1a [ 712.232269] env[61594]: DEBUG nova.network.neutron [-] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.232706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9a63a8a1557b4bea8a20e7d5b6a520be in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.247020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a63a8a1557b4bea8a20e7d5b6a520be [ 712.247020] env[61594]: INFO nova.compute.manager [-] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Took 0.11 seconds to deallocate network for instance. [ 712.249388] env[61594]: DEBUG nova.compute.claims [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.249698] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.249767] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.251694] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 6a7f7b9399f34585a14c10211c50137f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.303978] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a7f7b9399f34585a14c10211c50137f [ 712.348867] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Successfully created port: e317892b-9c88-4df9-a67b-0acebb4686d3 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.395198] env[61594]: DEBUG nova.network.neutron [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.453532] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac48018-4ce8-4af7-ac8e-4057e4e09681 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.462305] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b27c04-e798-4a15-a9cb-d08282fec062 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.497112] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8673ab-d0e7-4cc3-b682-4886efecb81c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.505860] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf7a399-7fd8-47fa-ac9b-fbb83bfdfd0f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.521282] env[61594]: DEBUG nova.compute.provider_tree [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.521806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 4e751723066e4cf7972a49e26665a980 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.532815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e751723066e4cf7972a49e26665a980 [ 712.533912] env[61594]: DEBUG nova.scheduler.client.report [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 712.536612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg d20639408d6e41569159eb19eb6ea445 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.549308] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d20639408d6e41569159eb19eb6ea445 [ 712.550216] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.300s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 712.550832] env[61594]: ERROR nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Traceback (most recent call last): [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.driver.spawn(context, instance, image_meta, [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] vm_ref = self.build_virtual_machine(instance, [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] vif_infos = vmwarevif.get_vif_info(self._session, [ 712.550832] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] for vif in network_info: [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self._sync_wrapper(fn, *args, **kwargs) [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.wait() [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self[:] = self._gt.wait() [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self._exit_event.wait() [ 712.551292] 
env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] result = hub.switch() [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 712.551292] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return self.greenlet.switch() [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] result = function(*args, **kwargs) [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] return func(*args, **kwargs) [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise e [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] nwinfo = self.network_api.allocate_for_instance( [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] created_port_ids = self._update_ports_for_instance( [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] with excutils.save_and_reraise_exception(): [ 712.552340] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] self.force_reraise() [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise self.value [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] updated_port 
= self._update_port( [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] _ensure_no_port_binding_failure(port) [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] raise exception.PortBindingFailed(port_id=port['id']) [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] nova.exception.PortBindingFailed: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. [ 712.552922] env[61594]: ERROR nova.compute.manager [instance: deb53e97-7f20-47d3-a069-0e435776bad8] [ 712.553522] env[61594]: DEBUG nova.compute.utils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 712.553522] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Build of instance deb53e97-7f20-47d3-a069-0e435776bad8 was re-scheduled: Binding failed for port 87352393-9957-4fc9-a5bc-009618ab2125, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 712.553644] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 712.553872] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 712.554027] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquired lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.554195] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 712.554596] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 0ca23f435d464e4a8af29d20a7384965 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.560979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ca23f435d464e4a8af29d20a7384965 [ 712.699033] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.699588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg f6b8928c779f48f199615b1ca95938f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.702821] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.715379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6b8928c779f48f199615b1ca95938f2 [ 712.716107] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Releasing lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.716474] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 712.716673] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 712.717246] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2f6cad83-de0d-4f1e-89fe-687c7ed1d5b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.728519] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23de7f8-4054-4716-ad98-0525b4f7fb4c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.751288] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e20b5017-bc1a-41c0-ba4b-83e4df43a53d could not be found. [ 712.751691] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 712.751732] env[61594]: INFO nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Took 0.04 seconds to destroy the instance on the hypervisor. [ 712.752141] env[61594]: DEBUG oslo.service.loopingcall [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 712.752305] env[61594]: DEBUG nova.compute.manager [-] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 712.752361] env[61594]: DEBUG nova.network.neutron [-] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 712.826819] env[61594]: DEBUG nova.network.neutron [-] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.827441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e483041202e34507a4d580f11aa0dfd0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.836055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e483041202e34507a4d580f11aa0dfd0 [ 712.836521] env[61594]: DEBUG nova.network.neutron [-] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.836945] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3a7f13399b4f4c338ec47a213c467a96 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.846339] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a7f13399b4f4c338ec47a213c467a96 [ 712.846822] env[61594]: INFO nova.compute.manager [-] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Took 0.09 seconds to deallocate network for instance. 
[ 712.849488] env[61594]: DEBUG nova.compute.claims [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 712.849671] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 712.849911] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 712.852393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 24c8ccd1e484484e8fbcd21c6fd7354f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 712.899286] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24c8ccd1e484484e8fbcd21c6fd7354f [ 713.048732] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec89bfab-9158-4e32-96fa-1433e6f30f39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.057776] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6827f39-7aa2-4776-9e52-ec4897c436ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.101027] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a022936-6877-49bb-bcd3-7d80ea74224c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.107314] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1684e1-df83-439a-b0d8-a02051bf8075 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.123600] env[61594]: DEBUG nova.compute.provider_tree [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 713.124068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg a5ba5c8177fa4c188d9b8e796da53e9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.139733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg a5ba5c8177fa4c188d9b8e796da53e9f [ 713.140646] env[61594]: DEBUG nova.scheduler.client.report [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 713.143391] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 613e5d5c09c445548c9b17b95a90e53a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.160652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 613e5d5c09c445548c9b17b95a90e53a [ 713.160652] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.309s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.160652] env[61594]: ERROR nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. 
[ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Traceback (most recent call last): [ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.driver.spawn(context, instance, image_meta, [ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.160652] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] vm_ref = self.build_virtual_machine(instance, [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] for vif in network_info: [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self._sync_wrapper(fn, *args, **kwargs) [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.wait() [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self[:] = self._gt.wait() [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 713.161364] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self._exit_event.wait() [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] result = hub.switch() [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return self.greenlet.switch() [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] result = function(*args, **kwargs) [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] return func(*args, **kwargs) [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise e [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] nwinfo = self.network_api.allocate_for_instance( [ 713.161676] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] created_port_ids = self._update_ports_for_instance( [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] with excutils.save_and_reraise_exception(): [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] self.force_reraise() [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise self.value [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] updated_port = self._update_port( [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] _ensure_no_port_binding_failure(port) [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 713.162000] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] raise exception.PortBindingFailed(port_id=port['id']) [ 713.162477] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] nova.exception.PortBindingFailed: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. [ 713.162477] env[61594]: ERROR nova.compute.manager [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] [ 713.162477] env[61594]: DEBUG nova.compute.utils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.162477] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Build of instance e20b5017-bc1a-41c0-ba4b-83e4df43a53d was re-scheduled: Binding failed for port e0bc5458-78be-41d3-b081-0861882153df, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 713.162477] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 713.162632] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.162632] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquired lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.162632] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.163041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 254ba2a8af1f4d85a9371d378efe10bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.171492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 254ba2a8af1f4d85a9371d378efe10bc [ 713.177684] env[61594]: DEBUG nova.network.neutron 
[None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.178154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 42f0763ab89f4ff289f09b719be3cf3b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.184616] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.185107] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 8832472847dc4291b0912fc5ffdda413 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.193931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42f0763ab89f4ff289f09b719be3cf3b [ 713.194272] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Releasing lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.195026] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 713.195026] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 713.195955] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8b6a081-1976-462e-b280-0dfdaf5607fd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.199431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8832472847dc4291b0912fc5ffdda413 [ 713.200046] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Releasing lock "refresh_cache-deb53e97-7f20-47d3-a069-0e435776bad8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.200308] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 713.200473] env[61594]: DEBUG nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 713.200600] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.208352] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 713.208598] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fae458a-9a44-4aaa-8dcf-cd435edd4867 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.236140] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.240360] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 713.240490] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 713.240667] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Deleting the datastore file [datastore1] 151fefe2-b70a-4ea5-8b50-08c7968b10fe {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.240926] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8664d8c2-6083-4562-8867-a724ef52a3eb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.248119] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Waiting for the task: (returnval){ [ 713.248119] env[61594]: value = "task-1291385" [ 713.248119] env[61594]: _type = "Task" [ 713.248119] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.256819] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Task: {'id': task-1291385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.273847] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.275361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg f5d3e15dd4294cd6a0c386ae80bd4c9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.289068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5d3e15dd4294cd6a0c386ae80bd4c9f [ 713.289715] env[61594]: DEBUG nova.network.neutron [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.290301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg cb8330c8cc7148ef9dc51abfc831ec0a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.305513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb8330c8cc7148ef9dc51abfc831ec0a [ 713.306051] env[61594]: INFO nova.compute.manager [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: deb53e97-7f20-47d3-a069-0e435776bad8] Took 0.11 seconds to deallocate network for instance. [ 713.308855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 650c7841fb33407c97a56f01d3e9bc9b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.389610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 650c7841fb33407c97a56f01d3e9bc9b [ 713.396112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg aa69cf9ecec346328f199eb3392898fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.443982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa69cf9ecec346328f199eb3392898fb [ 713.491954] env[61594]: INFO nova.scheduler.client.report [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Deleted allocations for instance deb53e97-7f20-47d3-a069-0e435776bad8 [ 713.499908] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg fd9f8ee81d7c4334b7f7e3dd5e4ed2e5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.501547] env[61594]: ERROR nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more 
information. [ 713.501547] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 713.501547] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.501547] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.501547] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.501547] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.501547] env[61594]: ERROR nova.compute.manager raise self.value [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.501547] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 713.501547] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.501547] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 713.502412] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.502412] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 713.502412] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. 
[ 713.502412] env[61594]: ERROR nova.compute.manager [ 713.502412] env[61594]: Traceback (most recent call last): [ 713.502412] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 713.502412] env[61594]: listener.cb(fileno) [ 713.502412] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 713.502412] env[61594]: result = function(*args, **kwargs) [ 713.502412] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.502412] env[61594]: return func(*args, **kwargs) [ 713.502412] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 713.502412] env[61594]: raise e [ 713.502412] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 713.502412] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 713.502412] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.502412] env[61594]: created_port_ids = self._update_ports_for_instance( [ 713.502412] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.502412] env[61594]: with excutils.save_and_reraise_exception(): [ 713.502412] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.502412] env[61594]: self.force_reraise() [ 713.502412] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.502412] env[61594]: raise self.value [ 713.502412] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.502412] env[61594]: updated_port = self._update_port( [ 713.502412] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.502412] env[61594]: _ensure_no_port_binding_failure(port) [ 713.502412] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.502412] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 713.503054] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. [ 713.503054] env[61594]: Removing descriptor: 19 [ 713.503054] env[61594]: ERROR nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. 
[ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Traceback (most recent call last): [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] yield resources [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.driver.spawn(context, instance, image_meta, [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.503054] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] vm_ref = self.build_virtual_machine(instance, [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] for vif in network_info: [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self._sync_wrapper(fn, *args, **kwargs) [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.wait() [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self[:] = self._gt.wait() [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self._exit_event.wait() [ 713.503854] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 713.504190] env[61594]: ERROR 
nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] result = hub.switch() [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self.greenlet.switch() [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] result = function(*args, **kwargs) [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return func(*args, **kwargs) [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise e [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] nwinfo = self.network_api.allocate_for_instance( [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.504190] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] created_port_ids = self._update_ports_for_instance( [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] with excutils.save_and_reraise_exception(): [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.force_reraise() [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise self.value [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] updated_port = self._update_port( [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.504500] 
env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] _ensure_no_port_binding_failure(port) [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.504500] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise exception.PortBindingFailed(port_id=port['id']) [ 713.504786] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. [ 713.504786] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] [ 713.504786] env[61594]: INFO nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Terminating instance [ 713.506120] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquiring lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.506378] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquired lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.506441] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.506808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 3ab29f1382eb4a2f89d43afc551ed2a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.522783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd9f8ee81d7c4334b7f7e3dd5e4ed2e5 [ 713.523040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-efdb9b24-4111-43e6-8816-67259c4f218c tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "deb53e97-7f20-47d3-a069-0e435776bad8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.476s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.533206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ab29f1382eb4a2f89d43afc551ed2a4 [ 713.691450] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 
tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.711761] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquiring lock "eb4ab6f2-3815-4bf1-a561-79bcdb74380c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.713985] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock "eb4ab6f2-3815-4bf1-a561-79bcdb74380c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.713985] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 4956f96fc6ee4e439e32915dd0906654 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.747240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4956f96fc6ee4e439e32915dd0906654 [ 713.747812] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 713.750235] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 4e0696f9ec004bbdbd78b6642ed92e32 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.761946] env[61594]: DEBUG oslo_vmware.api [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Task: {'id': task-1291385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039775} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.763124] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 713.763124] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 713.763124] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 713.763124] env[61594]: INFO nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Took 0.57 seconds to destroy the instance on the hypervisor. [ 713.763431] env[61594]: DEBUG oslo.service.loopingcall [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.763757] env[61594]: DEBUG nova.compute.manager [-] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 713.766271] env[61594]: DEBUG nova.compute.claims [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 713.766516] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.766559] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.768500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg be6803e41d634cbb8a4624eb6a256aed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 713.793397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e0696f9ec004bbdbd78b6642ed92e32 [ 713.815033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be6803e41d634cbb8a4624eb6a256aed [ 713.816716] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 713.948355] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80552943-f7b3-4268-942a-ad6575cc0c21 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.956953] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6aaba89-b736-440d-974c-eea8a31b9183 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.987696] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c6438b-bf2d-4396-9796-1f86e8c8e5cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.995052] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3beb1af9-d6a1-4158-bb4c-b012db945169 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.008186] env[61594]: DEBUG nova.compute.provider_tree [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 
tempest-ServerDiagnosticsV248Test-2019202710-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.008929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 87b2a92de37c4fb0876f6ccce175ac18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.017069] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.017069] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg dfb201c731dd429bb2bcfd939658c342 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.019961] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87b2a92de37c4fb0876f6ccce175ac18 [ 714.020882] env[61594]: DEBUG nova.scheduler.client.report [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 714.023896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg bb59dd0062794cd788b73b46adce9ebc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.032871] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfb201c731dd429bb2bcfd939658c342 [ 714.033422] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Releasing lock "refresh_cache-e20b5017-bc1a-41c0-ba4b-83e4df43a53d" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.033652] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 714.033865] env[61594]: DEBUG nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 714.034054] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.039546] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb59dd0062794cd788b73b46adce9ebc [ 714.040296] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.040795] env[61594]: ERROR nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.040795] env[61594]: Faults: ['InvalidArgument'] [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Traceback (most recent call last): [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self.driver.spawn(context, instance, image_meta, [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self._fetch_image_if_missing(context, vi) [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] image_cache(vi, tmp_image_ds_loc) [ 714.040795] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 
151fefe2-b70a-4ea5-8b50-08c7968b10fe] vm_util.copy_virtual_disk( [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] session._wait_for_task(vmdk_copy_task) [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return self.wait_for_task(task_ref) [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return evt.wait() [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] result = hub.switch() [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] return self.greenlet.switch() [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 714.041370] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] self.f(*self.args, **self.kw) [ 714.041917] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 714.041917] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] raise exceptions.translate_fault(task_info.error) [ 714.041917] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 714.041917] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Faults: ['InvalidArgument'] [ 714.041917] env[61594]: ERROR nova.compute.manager [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] [ 714.041917] env[61594]: DEBUG nova.compute.utils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 714.042569] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.226s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 714.045571] env[61594]: INFO nova.compute.claims [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.047077] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 420c50d57462473da2f227ac0df0b9fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.050179] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Build of instance 151fefe2-b70a-4ea5-8b50-08c7968b10fe was re-scheduled: A specified parameter was not correct: fileType [ 714.050179] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 714.050179] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 714.050179] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquiring lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.050179] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Acquired lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.050388] env[61594]: DEBUG nova.network.neutron [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.050388] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 5a66296b0c6f4678adb3488357c3b942 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.061140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a66296b0c6f4678adb3488357c3b942 [ 714.090730] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 420c50d57462473da2f227ac0df0b9fc [ 714.092954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 
tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg b8ff36decabc4ced99556658c29d757a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.099806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8ff36decabc4ced99556658c29d757a [ 714.115501] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.116085] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg ec59c430de164294857a1c8f0ee87164 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.122793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec59c430de164294857a1c8f0ee87164 [ 714.123435] env[61594]: DEBUG nova.network.neutron [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.123889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 56a03935f17042bc9a3173a6dedeea08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.132742] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56a03935f17042bc9a3173a6dedeea08 [ 714.133262] env[61594]: INFO nova.compute.manager [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: e20b5017-bc1a-41c0-ba4b-83e4df43a53d] Took 0.10 seconds to deallocate network for instance. [ 714.135034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 0d01161b5f464f8e926881f7395f87e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.146319] env[61594]: DEBUG nova.network.neutron [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.184390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d01161b5f464f8e926881f7395f87e0 [ 714.187105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 5ba71ee684c242e89eab8d3e09454c62 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.231321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba71ee684c242e89eab8d3e09454c62 [ 714.236490] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ddb9b20-12d7-4992-b505-09322eca65bc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.247035] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d3a1ec-11c1-4ccb-b8cc-d5361fd0c3a3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.278953] env[61594]: INFO nova.scheduler.client.report [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Deleted allocations for instance e20b5017-bc1a-41c0-ba4b-83e4df43a53d [ 714.285139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7bbf37-f906-4e8c-b8ea-25a6207d7cd7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.288868] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg d490904324e84842999947847725c23b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.296134] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db87530-af6c-4e7d-a1e8-65c2f40a273a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.311754] env[61594]: DEBUG nova.compute.provider_tree [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.312423] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg f1aec49bc2294aaaa7e7a7707b7cd581 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.314436] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d490904324e84842999947847725c23b [ 714.314818] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a46ce3f8-f57d-469c-a545-bbe150e6a998 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "e20b5017-bc1a-41c0-ba4b-83e4df43a53d" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.053s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.328043] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1aec49bc2294aaaa7e7a7707b7cd581 [ 714.329303] env[61594]: DEBUG nova.scheduler.client.report [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 714.332020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 4249daf04f0549b4a322bd0d5502b6b9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.347344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4249daf04f0549b4a322bd0d5502b6b9 [ 714.348369] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 714.348886] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 714.350671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg eeda3a1f6a1345dab9017753007235bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.390834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeda3a1f6a1345dab9017753007235bf [ 714.392280] env[61594]: DEBUG nova.compute.utils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 714.392916] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 567f2007de404c5e8fb12fec3f91381b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 714.393781] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 714.394312] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 714.413805] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 567f2007de404c5e8fb12fec3f91381b [ 714.414456] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 715.135819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg fb16c802538e4da08d51fa3bb2b1ba2e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.137848] env[61594]: DEBUG nova.policy [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59cf18f84b8c41fbb06db5e1e948ae80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8816e27bfe9140da98937bc2501fca42', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 715.139921] env[61594]: DEBUG nova.network.neutron [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.140398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 0d803081aa5c4b34b05cd2e9d8e915c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.141554] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Successfully created port: 73cedda1-f0ab-4dab-88e0-e194b3a607c2 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 715.143700] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.144159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 096939d3f1f64c378163cabeedd02029 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.150625] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d803081aa5c4b34b05cd2e9d8e915c9 [ 715.151177] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Releasing lock "refresh_cache-151fefe2-b70a-4ea5-8b50-08c7968b10fe" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.151474] env[61594]: DEBUG 
nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 715.151606] env[61594]: DEBUG nova.compute.manager [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] [instance: 151fefe2-b70a-4ea5-8b50-08c7968b10fe] Skipping network deallocation for instance since networking was not requested. {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 715.153430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg be115492476641079ae64fb8ad86d572 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.158476] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 096939d3f1f64c378163cabeedd02029 [ 715.159072] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Releasing lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.159465] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 715.159654] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 715.160628] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-206b29ec-7b05-4ab0-a875-74f18b82c69a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.173642] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc3c4a0-9b5f-4fa4-bdca-b7a3d12ab3a2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.190387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb16c802538e4da08d51fa3bb2b1ba2e [ 715.196751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 93e05926daac459ca9e1feeaaa265a3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.198197] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b3e22378-e257-4e49-9eb2-787b4afd0eb8 could not be found. [ 715.198390] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 715.198561] env[61594]: INFO nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 715.198807] env[61594]: DEBUG oslo.service.loopingcall [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.199252] env[61594]: DEBUG nova.compute.manager [-] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 715.199341] env[61594]: DEBUG nova.network.neutron [-] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 715.210029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be115492476641079ae64fb8ad86d572 [ 715.213479] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 944fe4f522814514b5716131627ee9dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.230489] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93e05926daac459ca9e1feeaaa265a3a [ 715.231669] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 715.246205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 944fe4f522814514b5716131627ee9dd [ 715.263276] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 715.263534] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 715.263692] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 715.263882] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Flavor 
pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 715.264084] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 715.264581] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 715.264581] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 715.264581] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 715.264771] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 715.264908] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 715.265091] env[61594]: DEBUG nova.virt.hardware [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.266270] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61ff6dd-4493-41b0-a1e8-77cc47063800 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.278385] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479fa906-25e9-446b-9a39-92fa1fc7e95c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.282209] env[61594]: DEBUG nova.compute.manager [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Received event network-changed-6f14da6b-a787-45eb-a5ad-99eec27448be {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 715.282397] env[61594]: DEBUG nova.compute.manager [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 
req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Refreshing instance network info cache due to event network-changed-6f14da6b-a787-45eb-a5ad-99eec27448be. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 715.282608] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Acquiring lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.282784] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Acquired lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.282941] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Refreshing network info cache for port 6f14da6b-a787-45eb-a5ad-99eec27448be {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.283378] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Expecting reply to msg 0eda22f11b214f77b22f1fd6ba800f64 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.285652] env[61594]: INFO nova.scheduler.client.report [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Deleted allocations for instance 151fefe2-b70a-4ea5-8b50-08c7968b10fe [ 715.292497] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Expecting reply to msg 3c4abe3cc45f4d9dbf0d4627fc338e13 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.303626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0eda22f11b214f77b22f1fd6ba800f64 [ 715.315482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c4abe3cc45f4d9dbf0d4627fc338e13 [ 715.316225] env[61594]: DEBUG oslo_concurrency.lockutils [None req-573cc642-9354-49ea-8033-f5044093c26e tempest-ServerDiagnosticsV248Test-2019202710 tempest-ServerDiagnosticsV248Test-2019202710-project-member] Lock "151fefe2-b70a-4ea5-8b50-08c7968b10fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.596s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.328785] env[61594]: ERROR nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. 
[ 715.328785] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 715.328785] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.328785] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.328785] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.328785] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.328785] env[61594]: ERROR nova.compute.manager raise self.value [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.328785] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 715.328785] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.328785] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 715.329253] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.329253] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 715.329253] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. 
[ 715.329253] env[61594]: ERROR nova.compute.manager [ 715.329253] env[61594]: Traceback (most recent call last): [ 715.329253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 715.329253] env[61594]: listener.cb(fileno) [ 715.329253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 715.329253] env[61594]: result = function(*args, **kwargs) [ 715.329253] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.329253] env[61594]: return func(*args, **kwargs) [ 715.329253] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 715.329253] env[61594]: raise e [ 715.329253] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 715.329253] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 715.329253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.329253] env[61594]: created_port_ids = self._update_ports_for_instance( [ 715.329253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.329253] env[61594]: with excutils.save_and_reraise_exception(): [ 715.329253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.329253] env[61594]: self.force_reraise() [ 715.329253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.329253] env[61594]: raise self.value [ 715.329253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.329253] env[61594]: updated_port = self._update_port( [ 715.329253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.329253] env[61594]: _ensure_no_port_binding_failure(port) [ 715.329253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.329253] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 715.330926] env[61594]: nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. [ 715.330926] env[61594]: Removing descriptor: 21 [ 715.330926] env[61594]: ERROR nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. 
[ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Traceback (most recent call last): [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] yield resources [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.driver.spawn(context, instance, image_meta, [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.330926] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] vm_ref = self.build_virtual_machine(instance, [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] for vif in network_info: [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self._sync_wrapper(fn, *args, **kwargs) [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.wait() [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self[:] = self._gt.wait() [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self._exit_event.wait() [ 715.331237] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.331566] env[61594]: ERROR 
nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] result = hub.switch() [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self.greenlet.switch() [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] result = function(*args, **kwargs) [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return func(*args, **kwargs) [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise e [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] nwinfo = self.network_api.allocate_for_instance( [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.331566] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] created_port_ids = self._update_ports_for_instance( [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] with excutils.save_and_reraise_exception(): [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.force_reraise() [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise self.value [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] updated_port = self._update_port( [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.332154] 
env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] _ensure_no_port_binding_failure(port) [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.332154] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise exception.PortBindingFailed(port_id=port['id']) [ 715.332444] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. [ 715.332444] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] [ 715.332444] env[61594]: INFO nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Terminating instance [ 715.333118] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquiring lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.333118] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquired lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.333118] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 715.333383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg f99db5dc6eba405da6240157b3443c2d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.340463] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f99db5dc6eba405da6240157b3443c2d [ 715.404042] env[61594]: DEBUG nova.network.neutron [-] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.404652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d4bf35584f3a4cb491789b490520154f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.415118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4bf35584f3a4cb491789b490520154f [ 715.415465] env[61594]: DEBUG nova.network.neutron [-] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.415892] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ef58d1b8d84b405e9fd75f05885cbe60 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.426535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef58d1b8d84b405e9fd75f05885cbe60 [ 715.427054] env[61594]: INFO nova.compute.manager [-] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Took 0.23 seconds to deallocate network for instance. [ 715.431801] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.437126] env[61594]: DEBUG nova.compute.claims [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 715.437315] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 715.437530] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.439410] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 5b0c5ca12c234cd6bb9594c70a30dfb1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.458066] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.504032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b0c5ca12c234cd6bb9594c70a30dfb1 [ 715.619838] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-356e8ad0-5f18-4c50-9b17-b867d479a8ff {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.629312] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d3bb1b-ed9a-4d39-ba7d-b1b514268bdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.662236] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a69056a-3aa0-443b-ab48-1e5e436853fb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.670195] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8aecc8f-ad09-4a85-8f52-23ad7257c0f3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.684799] env[61594]: DEBUG nova.compute.provider_tree [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 715.685342] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 7b5b906eb85044f58df550bb416d2ccd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.699516] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b5b906eb85044f58df550bb416d2ccd [ 715.700438] env[61594]: DEBUG nova.scheduler.client.report [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 715.702226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 4b8c1a0d4cd645af982ed41adb1aefc7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 715.717816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b8c1a0d4cd645af982ed41adb1aefc7 [ 715.720282] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 
tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.281s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.720282] env[61594]: ERROR nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Traceback (most recent call last): [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.driver.spawn(context, instance, image_meta, [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 715.720282] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] vm_ref = self.build_virtual_machine(instance, [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] vif_infos = vmwarevif.get_vif_info(self._session, [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] for vif in network_info: [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self._sync_wrapper(fn, *args, **kwargs) [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.wait() [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self[:] = self._gt.wait() [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self._exit_event.wait() [ 715.720570] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] result = hub.switch() [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return self.greenlet.switch() [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] result = function(*args, **kwargs) [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] return func(*args, **kwargs) [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise e [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] nwinfo = self.network_api.allocate_for_instance( [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 715.720893] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] created_port_ids = self._update_ports_for_instance( [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] with excutils.save_and_reraise_exception(): [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] self.force_reraise() [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise self.value [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: 
b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] updated_port = self._update_port( [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] _ensure_no_port_binding_failure(port) [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 715.721184] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] raise exception.PortBindingFailed(port_id=port['id']) [ 715.722207] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] nova.exception.PortBindingFailed: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. [ 715.722207] env[61594]: ERROR nova.compute.manager [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] [ 715.722207] env[61594]: DEBUG nova.compute.utils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 715.722207] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Build of instance b3e22378-e257-4e49-9eb2-787b4afd0eb8 was re-scheduled: Binding failed for port 6f14da6b-a787-45eb-a5ad-99eec27448be, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 715.722207] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 715.722378] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquiring lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.052504] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.053052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg d2fafa56c9c54bd6bfa6fa01792748e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.063946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2fafa56c9c54bd6bfa6fa01792748e6 [ 716.064596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Releasing lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.065011] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 716.065211] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 716.065738] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0cf1c54-92a9-4ea1-bb4f-b97a1ba009f6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.079317] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01eda6f-3244-4849-986a-057dd8e7fdef {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.106098] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7b2debf8-278d-443f-aaf7-3ae6c129981c could not be found. [ 716.106974] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 716.106974] env[61594]: INFO nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 716.107214] env[61594]: DEBUG oslo.service.loopingcall [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 716.107475] env[61594]: DEBUG nova.compute.manager [-] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 716.107596] env[61594]: DEBUG nova.network.neutron [-] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 716.227740] env[61594]: DEBUG nova.network.neutron [-] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.228304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 78d928597dda443995f23b6f86119999 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.236821] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78d928597dda443995f23b6f86119999 [ 716.237304] env[61594]: DEBUG nova.network.neutron [-] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.237702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 77380494a66c4826bef3e67ffe7438ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.250046] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77380494a66c4826bef3e67ffe7438ef [ 716.250550] env[61594]: INFO nova.compute.manager [-] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Took 0.14 seconds to deallocate network for instance. [ 716.252745] env[61594]: DEBUG nova.compute.claims [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 716.252926] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.253279] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 716.255526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 075cfcad856c4f4dbb9d54a2ed95bf0e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.310417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 075cfcad856c4f4dbb9d54a2ed95bf0e [ 716.414117] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca661a9-1baf-45fb-8088-212a9931483e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.425944] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83dd37f9-bcb9-48dc-83d5-475b94987e3f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.461223] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4552c4d1-bb50-4732-b8b5-e28c7a09175f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.469143] 
env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785f25c3-0d38-42f0-884d-637fc7aefb94 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.483265] env[61594]: DEBUG nova.compute.provider_tree [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.483887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 94c5c3cf7619487c8ce7312450811675 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.495055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94c5c3cf7619487c8ce7312450811675 [ 716.496192] env[61594]: DEBUG nova.scheduler.client.report [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 716.498917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg cc8e9fe18d9f494b9878987e132fc4f0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.516062] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc8e9fe18d9f494b9878987e132fc4f0 [ 716.517010] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.517844] env[61594]: ERROR nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. 
[ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Traceback (most recent call last): [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.driver.spawn(context, instance, image_meta, [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] vm_ref = self.build_virtual_machine(instance, [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] vif_infos = vmwarevif.get_vif_info(self._session, [ 716.517844] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] for vif in network_info: [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self._sync_wrapper(fn, *args, **kwargs) [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.wait() [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self[:] = self._gt.wait() [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self._exit_event.wait() [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] result = hub.switch() [ 716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
716.518180] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return self.greenlet.switch() [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] result = function(*args, **kwargs) [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] return func(*args, **kwargs) [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise e [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] nwinfo = self.network_api.allocate_for_instance( [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] created_port_ids = self._update_ports_for_instance( [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] with excutils.save_and_reraise_exception(): [ 716.518489] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] self.force_reraise() [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise self.value [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] updated_port = self._update_port( [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] _ensure_no_port_binding_failure(port) [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] raise exception.PortBindingFailed(port_id=port['id']) [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] nova.exception.PortBindingFailed: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. [ 716.518798] env[61594]: ERROR nova.compute.manager [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] [ 716.519374] env[61594]: DEBUG nova.compute.utils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 716.521168] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Build of instance 7b2debf8-278d-443f-aaf7-3ae6c129981c was re-scheduled: Binding failed for port aeedbef6-9d53-4e69-9e27-8281b1f12959, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 716.523057] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 716.523057] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquiring lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.523057] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Acquired lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.523057] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.523057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 1ec6138fc9b14ad59995e5bdad0ef9ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.532554] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ec6138fc9b14ad59995e5bdad0ef9ea [ 716.549715] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] 
[instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.550310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Expecting reply to msg a8acecb1aed541c4915e05dfcb1bdd03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.560856] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8acecb1aed541c4915e05dfcb1bdd03 [ 716.561476] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Releasing lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.561706] env[61594]: DEBUG nova.compute.manager [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Received event network-changed-aeedbef6-9d53-4e69-9e27-8281b1f12959 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 716.561874] env[61594]: DEBUG nova.compute.manager [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Refreshing instance network info cache due to event network-changed-aeedbef6-9d53-4e69-9e27-8281b1f12959. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 716.562067] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Acquiring lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 716.562321] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Acquired lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.562492] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 716.562897] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg a5f9c1d1768b4feb8d67957541892aa5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 716.582256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5f9c1d1768b4feb8d67957541892aa5 [ 716.613512] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 716.717862] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.091344] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.091885] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 80aa7ccba6234ba786d4905ef2833209 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.102591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80aa7ccba6234ba786d4905ef2833209 [ 717.104918] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Releasing lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.104918] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 717.104918] env[61594]: DEBUG nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 717.104918] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 717.106876] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Acquired lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.107063] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Refreshing network info cache for port aeedbef6-9d53-4e69-9e27-8281b1f12959 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.107551] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Expecting reply to msg 62739c4159754e8394438a7ef7915b5c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.118798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62739c4159754e8394438a7ef7915b5c [ 717.198571] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.198571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg e1e229f2a0094cb482f3b4524f6938f8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.205025] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.208698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e1e229f2a0094cb482f3b4524f6938f8 [ 717.209927] env[61594]: DEBUG nova.network.neutron [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.209927] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg ad2ba0a5d9ae441a92b89903bf970f29 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.221804] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad2ba0a5d9ae441a92b89903bf970f29 [ 717.223316] env[61594]: INFO nova.compute.manager [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Took 0.12 seconds to deallocate network for instance. [ 717.225467] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 2e0ededa093848e395c396532fc9de79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.274458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e0ededa093848e395c396532fc9de79 [ 717.277981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 798eed103ecc4490a4b8888fd5dd3edb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.318040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 798eed103ecc4490a4b8888fd5dd3edb [ 717.356774] env[61594]: INFO nova.scheduler.client.report [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Deleted allocations for instance 7b2debf8-278d-443f-aaf7-3ae6c129981c [ 717.364789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Expecting reply to msg 3fff58ba6e6f4297bf9281bc89997e43 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.390780] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fff58ba6e6f4297bf9281bc89997e43 [ 717.391379] env[61594]: DEBUG oslo_concurrency.lockutils [None req-64b5fb14-0080-49e2-98ce-dd286cdd12b6 tempest-ServersTestManualDisk-40187549 tempest-ServersTestManualDisk-40187549-project-member] Lock "7b2debf8-278d-443f-aaf7-3ae6c129981c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.767748] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 
tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.768698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg ae8f115c198845df84db453cd6e98483 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.782495] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae8f115c198845df84db453cd6e98483 [ 717.782495] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Releasing lock "refresh_cache-b3e22378-e257-4e49-9eb2-787b4afd0eb8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.782495] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 717.782495] env[61594]: DEBUG nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 717.782495] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 717.896114] env[61594]: DEBUG nova.network.neutron [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: 7b2debf8-278d-443f-aaf7-3ae6c129981c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.897054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Expecting reply to msg bf61c26f61a442319c943aa44ed6f1ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.900032] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.900748] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 9b4b4d2dd4bf42948a59727f9536742c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.913028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b4b4d2dd4bf42948a59727f9536742c [ 717.913625] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf61c26f61a442319c943aa44ed6f1ff [ 717.914231] env[61594]: DEBUG nova.network.neutron [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.914615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 4e53835373564a0b84d9ad2474e4c9d6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.915539] env[61594]: DEBUG oslo_concurrency.lockutils [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] Releasing lock "refresh_cache-7b2debf8-278d-443f-aaf7-3ae6c129981c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 717.915756] env[61594]: DEBUG nova.compute.manager [req-b3e559e0-0e9f-449d-bf05-fb47647a8f96 req-7dd7753d-32f8-4587-b42b-dffbfe5d1c0b service nova] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Received event network-vif-deleted-6f14da6b-a787-45eb-a5ad-99eec27448be {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 717.928523] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e53835373564a0b84d9ad2474e4c9d6 [ 717.928911] env[61594]: INFO nova.compute.manager [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] [instance: b3e22378-e257-4e49-9eb2-787b4afd0eb8] Took 0.15 seconds to deallocate network for instance. 
[ 717.931538] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 1f38c47b201543f59bf27530d3aa2f86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 717.982556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f38c47b201543f59bf27530d3aa2f86 [ 717.986127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 58ea735993044cf8865937910ec0208f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.038935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58ea735993044cf8865937910ec0208f [ 718.077421] env[61594]: INFO nova.scheduler.client.report [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Deleted allocations for instance b3e22378-e257-4e49-9eb2-787b4afd0eb8 [ 718.089458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Expecting reply to msg 7e2381da2f564d41a54bd906862a2b02 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.116334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e2381da2f564d41a54bd906862a2b02 [ 718.116960] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b44752ba-6ef4-4792-b6dc-ca0cc2296584 tempest-ServerRescueTestJSONUnderV235-863282302 tempest-ServerRescueTestJSONUnderV235-863282302-project-member] Lock "b3e22378-e257-4e49-9eb2-787b4afd0eb8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.869s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.239084] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.239084] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.239084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg eeca1661d361415dbc217b93f848c5f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.250128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eeca1661d361415dbc217b93f848c5f7 [ 718.250128] env[61594]: DEBUG 
nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 718.251453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 9ec6bf2cf95040ef9053f9073f2c5d6f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.302417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ec6bf2cf95040ef9053f9073f2c5d6f [ 718.324414] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 718.325097] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 718.326796] env[61594]: INFO nova.compute.claims [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 718.329735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d0728dd2fda343638d7cc3552e1e75b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.402482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0728dd2fda343638d7cc3552e1e75b8 [ 718.403419] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 728a171e28e146c5b83485484fe7b6ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.415038] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 728a171e28e146c5b83485484fe7b6ae [ 718.526441] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc6d1c8-c0f5-4250-a2d2-a7d0f0db61ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.535168] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2434df60-646f-4263-a268-652d60498d4a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.565962] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bd0cc8-bc5a-40b7-b767-0e8e2e3ef039 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 718.573430] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e16752-cb0d-46d7-9ccd-487bc1bf83a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.578501] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Successfully created port: 9d511783-4632-4be0-822a-27f7347d9f78 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.590906] env[61594]: DEBUG nova.compute.provider_tree [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.591666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 35cfc4c8c84342cc9706201a5c1e4a4c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.601256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35cfc4c8c84342cc9706201a5c1e4a4c [ 718.602412] env[61594]: DEBUG nova.scheduler.client.report [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 718.605120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 08ea1c226d24416da956f91d91d5c946 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.618886] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ea1c226d24416da956f91d91d5c946 [ 718.619899] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.295s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 718.620398] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 718.622836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg be8fdb07bc2641df8c8bcba19c2de3c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.669405] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be8fdb07bc2641df8c8bcba19c2de3c5 [ 718.671738] env[61594]: DEBUG nova.compute.utils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 718.672472] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 2fcdeb0d595a410cb4d855f49e23bed3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.673915] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 718.674207] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 718.687197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2fcdeb0d595a410cb4d855f49e23bed3 [ 718.687914] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 718.690376] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ddb99fb344e34dffa63a8fc77fab9978 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.734154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ddb99fb344e34dffa63a8fc77fab9978 [ 718.736664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 5d022dba5e914379a7308d88e364b214 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 718.772360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d022dba5e914379a7308d88e364b214 [ 718.774518] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 718.809478] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 718.809746] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 718.809958] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 718.810084] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 718.810698] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 718.810698] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 718.810698] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 718.810698] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 718.810865] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 718.811240] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 718.811481] env[61594]: DEBUG nova.virt.hardware [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.812430] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36531155-2edc-4e24-9ad1-480ec20a8ea3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.823105] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d0c133-7112-485d-9d3a-e46b335c0209 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.228076] env[61594]: DEBUG nova.policy [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 720.888305] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquiring lock "fd8bf9dd-bed4-4f79-9d5b-80073b19649a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.888684] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "fd8bf9dd-bed4-4f79-9d5b-80073b19649a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.889263] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 3ecf2d9710434557b1d76d4316749d03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 720.910509] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ecf2d9710434557b1d76d4316749d03 [ 720.911514] env[61594]: DEBUG 
nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 720.913694] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 07c04749e11b4dd89c347e181e6a9640 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 720.965512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07c04749e11b4dd89c347e181e6a9640 [ 720.989994] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.990299] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.992287] env[61594]: INFO nova.compute.claims [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 720.994158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 75131809b10647d6bc1b7186b63f4bbd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.049190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75131809b10647d6bc1b7186b63f4bbd [ 721.050664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg e4e61db720784f938da86753e1fd851b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.058663] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4e61db720784f938da86753e1fd851b [ 721.156295] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2773ee-da44-4d1e-afe8-453ce8afeae4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.163044] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4019fc-a7c3-4f66-a8ce-d558125425c8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.202496] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5836cca5-77f4-4503-984e-668e3145a978 {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.210814] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd26307-0566-4556-bdb1-fdb6d35c0309 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.224313] env[61594]: DEBUG nova.compute.provider_tree [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 721.224889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg d5a37c777e40473d8bf44039f84d0ed4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.237622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5a37c777e40473d8bf44039f84d0ed4 [ 721.238702] env[61594]: DEBUG nova.scheduler.client.report [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 721.241418] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 58ee14538680431692ca4533cc9d03f8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.260397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 58ee14538680431692ca4533cc9d03f8 [ 721.261246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.271s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.261726] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 721.263351] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg b9d32e90bea14b7eb86408d3ae73044e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.303228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9d32e90bea14b7eb86408d3ae73044e [ 721.305998] env[61594]: DEBUG nova.compute.utils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 721.306518] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 8d43a637cbd6412abac95e97298ec87f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.307353] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 721.307533] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 721.320492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d43a637cbd6412abac95e97298ec87f [ 721.321395] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 721.323376] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 9da001e4682a4cc4bfd9c0be5fcf173f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.373595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9da001e4682a4cc4bfd9c0be5fcf173f [ 721.377377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg c5ef5d8af34a4886b5b5f3e8c3e8a453 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 721.418776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5ef5d8af34a4886b5b5f3e8c3e8a453 [ 721.420109] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 721.452937] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.453097] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.453257] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.453431] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.453925] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Image pref 0:0:0 
{{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.453925] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.453925] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.454131] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 721.454265] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.454424] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.454597] env[61594]: DEBUG nova.virt.hardware [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.455839] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71518548-8140-45d5-98c1-14dcd4a2556c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.464391] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04431354-323f-4275-9aa1-c368d905c92c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.903779] env[61594]: DEBUG nova.policy [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b226f922a774dbba9acbc3b01ebaf3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b239a909024e4ca698dab05075729b28', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 722.094079] env[61594]: DEBUG nova.network.neutron [None 
req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Successfully created port: 66c001d7-317a-4fa7-9e60-ec7751d386f5 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 722.741142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 32d0e24f1046451fb148129e9be777af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 722.764029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32d0e24f1046451fb148129e9be777af [ 722.764029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.971198] env[61594]: ERROR nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 722.971198] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 722.971198] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.971198] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.971198] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.971198] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.971198] env[61594]: ERROR nova.compute.manager raise self.value [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.971198] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 722.971198] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.971198] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 722.972071] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.972071] env[61594]: ERROR nova.compute.manager raise 
exception.PortBindingFailed(port_id=port['id']) [ 722.972071] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 722.972071] env[61594]: ERROR nova.compute.manager [ 722.972071] env[61594]: Traceback (most recent call last): [ 722.972071] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 722.972071] env[61594]: listener.cb(fileno) [ 722.972071] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 722.972071] env[61594]: result = function(*args, **kwargs) [ 722.972071] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.972071] env[61594]: return func(*args, **kwargs) [ 722.972071] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 722.972071] env[61594]: raise e [ 722.972071] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 722.972071] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 722.972071] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.972071] env[61594]: created_port_ids = self._update_ports_for_instance( [ 722.972071] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.972071] env[61594]: with excutils.save_and_reraise_exception(): [ 722.972071] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.972071] env[61594]: self.force_reraise() [ 722.972071] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.972071] env[61594]: raise self.value [ 722.972071] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.972071] env[61594]: updated_port = self._update_port( [ 722.972071] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.972071] env[61594]: _ensure_no_port_binding_failure(port) [ 722.972071] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.972071] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 722.973736] env[61594]: nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 722.973736] env[61594]: Removing descriptor: 23 [ 722.973736] env[61594]: ERROR nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. 
[ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Traceback (most recent call last): [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] yield resources [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.driver.spawn(context, instance, image_meta, [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 722.973736] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] vm_ref = self.build_virtual_machine(instance, [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] vif_infos = vmwarevif.get_vif_info(self._session, [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] for vif in network_info: [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self._sync_wrapper(fn, *args, **kwargs) [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.wait() [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self[:] = self._gt.wait() [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self._exit_event.wait() [ 722.974024] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 722.974328] env[61594]: ERROR 
nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] result = hub.switch() [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self.greenlet.switch() [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] result = function(*args, **kwargs) [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return func(*args, **kwargs) [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise e [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] nwinfo = self.network_api.allocate_for_instance( [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 722.974328] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] created_port_ids = self._update_ports_for_instance( [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] with excutils.save_and_reraise_exception(): [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.force_reraise() [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise self.value [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] updated_port = self._update_port( [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 722.974628] 
env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] _ensure_no_port_binding_failure(port) [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 722.974628] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise exception.PortBindingFailed(port_id=port['id']) [ 722.974893] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 722.974893] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] [ 722.974893] env[61594]: INFO nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Terminating instance [ 722.979676] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.979676] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.979676] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.980126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg faa26bfa32e6449890c469d080e801fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 722.990862] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faa26bfa32e6449890c469d080e801fc [ 723.330088] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.669613] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquiring lock "59555974-e1a8-467a-872e-5d0cd4daffdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.669863] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "59555974-e1a8-467a-872e-5d0cd4daffdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.671354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg bdb9d54e656349a78a1025c6747d9566 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 723.683615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdb9d54e656349a78a1025c6747d9566 [ 723.686735] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 723.686735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 48a8e294bbb84170b5c27d1147de19d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 723.735035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48a8e294bbb84170b5c27d1147de19d2 [ 723.764240] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 723.764510] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.766292] env[61594]: INFO nova.compute.claims [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.768023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 
tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 9a76288a725a469eb2da5248f7a2b852 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 723.815054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a76288a725a469eb2da5248f7a2b852 [ 723.815457] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg da8824646b2d41a682492a8f87d73fb9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 723.829387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da8824646b2d41a682492a8f87d73fb9 [ 723.944117] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdacca90-604c-4996-b637-915051208169 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.951924] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-540b3c79-b398-4037-a802-fdc4d476a2a4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.993280] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269537c7-730c-46a6-8937-f630565d242a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.001709] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079f6b69-ca71-4f7a-ab66-db3332a2a963 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.015267] env[61594]: DEBUG nova.compute.provider_tree [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.015779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg cb364c0abd9140e6b2a83b786b097f81 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.025599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb364c0abd9140e6b2a83b786b097f81 [ 724.026611] env[61594]: DEBUG nova.scheduler.client.report [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 724.028919] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 7c4178f24f6a42daa6e4cf801e577fb3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.051794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c4178f24f6a42daa6e4cf801e577fb3 [ 724.051794] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.286s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.051794] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 724.053016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg ca6dcd0c193346678d221c47761aabd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.098032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca6dcd0c193346678d221c47761aabd1 [ 724.098032] env[61594]: DEBUG nova.compute.utils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 724.098032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 876311b9c79d4669911c68d0db69dbb0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.099226] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 724.099338] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 724.110800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 876311b9c79d4669911c68d0db69dbb0 [ 724.111408] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 724.113068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg b5658866da3b45a9bb576c4335024048 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.119282] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.119799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7d55eaadb3cd48dfb296df4c40241525 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.127762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d55eaadb3cd48dfb296df4c40241525 [ 724.128613] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.129192] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 724.129515] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 724.130337] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3fc1b602-112c-4b6b-817a-128b41ef9151 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.149626] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ce60c5-cd36-4c87-b9e7-b041153d17f8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.162694] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5658866da3b45a9bb576c4335024048 [ 724.166572] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg d05d45028adb4e6fac67c3da7aa9e9df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.181233] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f3227ba-f30a-4725-94d9-ac1d5f1f16de could not be found. [ 724.181497] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 724.181760] env[61594]: INFO nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Took 0.05 seconds to destroy the instance on the hypervisor. [ 724.182046] env[61594]: DEBUG oslo.service.loopingcall [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.182312] env[61594]: DEBUG nova.compute.manager [-] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 724.182412] env[61594]: DEBUG nova.network.neutron [-] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.204225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d05d45028adb4e6fac67c3da7aa9e9df [ 724.205747] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 724.233974] env[61594]: ERROR nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 724.233974] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 724.233974] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 724.233974] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 724.233974] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.233974] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.233974] env[61594]: ERROR nova.compute.manager raise self.value [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 724.233974] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 724.233974] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.233974] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 724.234470] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.234470] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 724.234470] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 724.234470] env[61594]: ERROR nova.compute.manager [ 724.234470] env[61594]: Traceback (most recent call last): [ 724.234470] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 724.234470] env[61594]: listener.cb(fileno) [ 724.234470] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 724.234470] env[61594]: result = function(*args, **kwargs) [ 724.234470] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.234470] env[61594]: return func(*args, **kwargs) [ 724.234470] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 724.234470] env[61594]: raise e [ 724.234470] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 724.234470] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 724.234470] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 724.234470] env[61594]: created_port_ids = self._update_ports_for_instance( [ 724.234470] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 724.234470] env[61594]: with excutils.save_and_reraise_exception(): [ 724.234470] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.234470] env[61594]: self.force_reraise() [ 724.234470] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.234470] env[61594]: raise self.value [ 724.234470] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 724.234470] env[61594]: updated_port = self._update_port( [ 724.234470] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.234470] env[61594]: _ensure_no_port_binding_failure(port) [ 724.234470] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.234470] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 724.235169] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 724.235169] env[61594]: Removing descriptor: 20 [ 724.235169] env[61594]: ERROR nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. 
[ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Traceback (most recent call last): [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] yield resources [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.driver.spawn(context, instance, image_meta, [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.235169] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] vm_ref = self.build_virtual_machine(instance, [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] for vif in network_info: [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self._sync_wrapper(fn, *args, **kwargs) [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.wait() [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self[:] = self._gt.wait() [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self._exit_event.wait() [ 724.235529] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.235880] env[61594]: ERROR 
nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] result = hub.switch() [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self.greenlet.switch() [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] result = function(*args, **kwargs) [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return func(*args, **kwargs) [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise e [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] nwinfo = self.network_api.allocate_for_instance( [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 724.235880] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] created_port_ids = self._update_ports_for_instance( [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] with excutils.save_and_reraise_exception(): [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.force_reraise() [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise self.value [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] updated_port = self._update_port( [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.236249] 
env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] _ensure_no_port_binding_failure(port) [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.236249] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise exception.PortBindingFailed(port_id=port['id']) [ 724.236566] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 724.236566] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] [ 724.236566] env[61594]: INFO nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Terminating instance [ 724.238802] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.238802] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquired lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.238802] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.238802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 5e9a892303ce4fe69d50898ffc1c8b02 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.241487] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 724.241709] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 724.241870] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 724.242099] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 724.242282] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 724.242426] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 724.242631] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 724.242788] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 724.242955] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 724.243132] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 724.243307] env[61594]: DEBUG nova.virt.hardware [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 724.244412] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3813fd07-7837-4df1-afb3-7587179838fc {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.248406] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e9a892303ce4fe69d50898ffc1c8b02 [ 724.255964] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81779c3-e2e3-40c1-ac00-3c4ac375d948 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.304151] env[61594]: DEBUG nova.network.neutron [-] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.304693] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9827dadeafb248208ba060edde8df21c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.312599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9827dadeafb248208ba060edde8df21c [ 724.313062] env[61594]: DEBUG nova.network.neutron [-] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.313478] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4c101aecc65e4d98b6e4327246e71e37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.326184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c101aecc65e4d98b6e4327246e71e37 [ 724.326679] env[61594]: INFO nova.compute.manager [-] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Took 0.14 seconds to deallocate network for instance. [ 724.329637] env[61594]: DEBUG nova.compute.claims [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.329824] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 724.330427] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 724.333310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6d2aea40d1a84424b373a125f4717e7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.369763] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.385397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d2aea40d1a84424b373a125f4717e7c [ 724.500905] env[61594]: DEBUG nova.policy [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50be22513aa7452db8ce44f19f0ba863', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '040cba7015c544f0a1dcc43e37b20351', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 724.530458] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f68ddf-5e55-42e6-8043-195369408857 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.539736] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-636c1103-9c49-4708-bd30-54ef072a8f9c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.580067] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e131e0c-a452-494b-92ad-3bf2b2a191b1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.591122] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d90f0af-af77-43c2-8472-4ee0dea7562f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.604928] env[61594]: DEBUG nova.compute.provider_tree [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.605518] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7819f84ce3eb409db2c2595aca86e526 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.613656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7819f84ce3eb409db2c2595aca86e526 [ 724.614758] env[61594]: DEBUG nova.scheduler.client.report [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 724.618234] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6476225f16fc455096ad5501ff4666cf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.637026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6476225f16fc455096ad5501ff4666cf [ 724.637026] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 724.637162] env[61594]: ERROR nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Traceback (most recent call last): [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.driver.spawn(context, instance, image_meta, [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] vm_ref = self.build_virtual_machine(instance, [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] vif_infos = vmwarevif.get_vif_info(self._session, [ 724.637162] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] for vif in network_info: [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self._sync_wrapper(fn, *args, **kwargs) [ 724.637428] env[61594]: ERROR 
nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.wait() [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self[:] = self._gt.wait() [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self._exit_event.wait() [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] result = hub.switch() [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 724.637428] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return self.greenlet.switch() [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] result = function(*args, **kwargs) [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] return func(*args, **kwargs) [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise e [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] nwinfo = self.network_api.allocate_for_instance( [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] created_port_ids = self._update_ports_for_instance( [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 724.637713] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] with excutils.save_and_reraise_exception(): [ 724.637713] env[61594]: ERROR nova.compute.manager 
[instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] self.force_reraise() [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise self.value [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] updated_port = self._update_port( [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] _ensure_no_port_binding_failure(port) [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] raise exception.PortBindingFailed(port_id=port['id']) [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] nova.exception.PortBindingFailed: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. [ 724.638122] env[61594]: ERROR nova.compute.manager [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] [ 724.638375] env[61594]: DEBUG nova.compute.utils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 724.639367] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Build of instance 8f3227ba-f30a-4725-94d9-ac1d5f1f16de was re-scheduled: Binding failed for port e317892b-9c88-4df9-a67b-0acebb4686d3, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 724.639796] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 724.640262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 724.640262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.640399] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 724.640739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 3908032307da430e811e5b32a8d86aa7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.650142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3908032307da430e811e5b32a8d86aa7 [ 724.723851] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.881406] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.882165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 6ad7d455c793437889dedd85b32906c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 724.892032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ad7d455c793437889dedd85b32906c3 [ 724.892762] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Releasing lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.893187] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 724.893382] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 724.894094] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6b3d9c51-afe7-4b29-b75c-8490bb790d8b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.903892] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084da831-cd82-43ca-a263-f297fd2652a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.926139] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b16d0a82-271e-4e37-bfcd-49c3749d16ac could not be found. 
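The WARNING above, together with the "Instance destroyed" entry that follows, records the VMware driver tolerating a VM that was never actually created on the backend: the UUID lookup raises InstanceNotFound and teardown simply continues as if the destroy had succeeded. A minimal, hypothetical sketch of that tolerant-destroy pattern (the helper names on the session object are illustrative, not Nova's actual vmops code):

class InstanceNotFound(Exception):
    """Raised when the backend has no VM for the given instance UUID."""


def destroy(session, instance_uuid):
    # Look up the VM on the hypervisor; tolerate it already being gone,
    # which is exactly what the WARNING / "Instance destroyed" pair above logs.
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)   # hypothetical helper
        session.unregister_and_delete(vm_ref)             # hypothetical helper
    except InstanceNotFound:
        # Nothing to tear down on the backend; treat the destroy as done so
        # network deallocation and claim cleanup can still proceed.
        pass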
[ 724.927123] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 724.927123] env[61594]: INFO nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Took 0.03 seconds to destroy the instance on the hypervisor. [ 724.927123] env[61594]: DEBUG oslo.service.loopingcall [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 724.927123] env[61594]: DEBUG nova.compute.manager [-] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 724.927123] env[61594]: DEBUG nova.network.neutron [-] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.935395] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Successfully created port: 7cc4bed0-779d-4ad1-b83a-227f0c2229e4 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.218567] env[61594]: DEBUG nova.network.neutron [-] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 725.219610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 21011dbb778c47cd93e0ffd050dd6d16 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.232712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21011dbb778c47cd93e0ffd050dd6d16 [ 725.233463] env[61594]: DEBUG nova.network.neutron [-] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.233624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8cc3c9506a5c46e3b11520c3fb1a7d91 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.245884] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cc3c9506a5c46e3b11520c3fb1a7d91 [ 725.247227] env[61594]: INFO nova.compute.manager [-] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Took 0.32 seconds to deallocate network for instance. 
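Every traceback in this section bottoms out in the same check: after asking Neutron to update and bind the port, Nova inspects the returned port and raises PortBindingFailed when the binding did not succeed, which is what aborts the spawn and leads to the claim abort and re-schedule logged here. A simplified, self-contained sketch of that check, following the function and exception names visible in the tracebacks (the real code in nova/network/neutron.py differs in detail; the vif_type value is an assumption based on Neutron's usual failure marker):

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind by setting binding:vif_type to
    # 'binding_failed'; converting that into an exception is what produces
    # the PortBindingFailed errors logged above.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port Neutron failed to bind
try:
    _ensure_no_port_binding_failure(
        {'id': '73cedda1-f0ab-4dab-88e0-e194b3a607c2',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)  # reproduces the message seen in the log entries above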
[ 725.248934] env[61594]: DEBUG nova.compute.claims [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 725.249133] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.249353] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.251174] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 9094d719124043b6bd6720f7c9406ede in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.294095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9094d719124043b6bd6720f7c9406ede [ 725.385673] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.385673] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b962bead33674f568c01752043be125e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.400018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b962bead33674f568c01752043be125e [ 725.400212] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-8f3227ba-f30a-4725-94d9-ac1d5f1f16de" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 725.400654] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 725.400980] env[61594]: DEBUG nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 725.401352] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 725.442872] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95c6978-caa8-4d32-b645-ddf92e63c55d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.453356] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cf5a02-6ebf-4418-866a-3827954085b1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.490255] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ee6ed2-83e7-44a2-8082-aa8c3f2e9959 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.498033] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679d6d4d-065c-40d0-b0b3-2e53bdef6c2a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.503483] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 725.504126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 8aa32369579a4a6192f959d5e94ab633 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.512425] env[61594]: DEBUG nova.compute.provider_tree [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.512951] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 7a4107152986442e8901dabec1322119 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.517763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8aa32369579a4a6192f959d5e94ab633 [ 725.518539] env[61594]: DEBUG nova.network.neutron [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.518688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg d3174c3d4f174055ab125f215f657c3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.523165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a4107152986442e8901dabec1322119 [ 725.524850] env[61594]: DEBUG nova.scheduler.client.report [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 725.527503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 256cabefbcfe4d95b3e5bbb2b3816b85 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.532766] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d3174c3d4f174055ab125f215f657c3a [ 725.533326] env[61594]: INFO nova.compute.manager [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 8f3227ba-f30a-4725-94d9-ac1d5f1f16de] Took 0.13 seconds to deallocate 
network for instance. [ 725.535244] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg d606dd07a04c48099e29d88223be567b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.543018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 256cabefbcfe4d95b3e5bbb2b3816b85 [ 725.543018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.543284] env[61594]: ERROR nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Traceback (most recent call last): [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.driver.spawn(context, instance, image_meta, [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] vm_ref = self.build_virtual_machine(instance, [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] vif_infos = vmwarevif.get_vif_info(self._session, [ 725.543284] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] for vif in network_info: [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self._sync_wrapper(fn, *args, **kwargs) [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.wait() [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self[:] = self._gt.wait() [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self._exit_event.wait() [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] result = hub.switch() [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 725.543606] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return self.greenlet.switch() [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] result = function(*args, **kwargs) [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] return func(*args, **kwargs) [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise e [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] nwinfo = self.network_api.allocate_for_instance( [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] created_port_ids = self._update_ports_for_instance( [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] with excutils.save_and_reraise_exception(): [ 725.543948] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] self.force_reraise() [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise self.value [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] updated_port = self._update_port( [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] _ensure_no_port_binding_failure(port) [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] raise exception.PortBindingFailed(port_id=port['id']) [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] nova.exception.PortBindingFailed: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. [ 725.544308] env[61594]: ERROR nova.compute.manager [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] [ 725.544599] env[61594]: DEBUG nova.compute.utils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 725.545904] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Build of instance b16d0a82-271e-4e37-bfcd-49c3749d16ac was re-scheduled: Binding failed for port 73cedda1-f0ab-4dab-88e0-e194b3a607c2, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 725.546355] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 725.546571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 725.547296] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquired lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 725.547296] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 725.547296] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 7a215d69cc3f435d92987adcbb1d8fc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.555287] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a215d69cc3f435d92987adcbb1d8fc4 [ 725.597783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d606dd07a04c48099e29d88223be567b [ 725.600809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg ea8d819f15a546df932f6ff1d574cfbb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.633204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea8d819f15a546df932f6ff1d574cfbb [ 725.656380] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 725.659764] env[61594]: INFO nova.scheduler.client.report [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Deleted allocations for instance 8f3227ba-f30a-4725-94d9-ac1d5f1f16de [ 725.668254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 4b247b4878224b998ed912ecf82d7ca3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 725.680277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b247b4878224b998ed912ecf82d7ca3 [ 725.680874] env[61594]: DEBUG oslo_concurrency.lockutils [None req-45c2815a-b388-4710-b579-e9e5ab727b2e tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "8f3227ba-f30a-4725-94d9-ac1d5f1f16de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.415s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 726.345516] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.346200] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 8fda14e7a5fb4a90b6e4ac730d614b94 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.362365] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fda14e7a5fb4a90b6e4ac730d614b94 [ 726.362582] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Releasing lock "refresh_cache-b16d0a82-271e-4e37-bfcd-49c3749d16ac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 726.362810] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 726.363229] env[61594]: DEBUG nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 726.363229] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 726.451949] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 726.452820] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 90b1da7051aa4079a775bf4fc6cef6f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.460706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90b1da7051aa4079a775bf4fc6cef6f7 [ 726.462182] env[61594]: DEBUG nova.network.neutron [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.462693] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 64effa866dff432ab85a0875004231da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.479021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64effa866dff432ab85a0875004231da [ 726.479021] env[61594]: INFO nova.compute.manager [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: b16d0a82-271e-4e37-bfcd-49c3749d16ac] Took 0.11 seconds to deallocate network for instance. 
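The PortBindingFailed tracebacks in this run all pass through the same two pieces of machinery: a check on the port that neutron returns after an update (the _ensure_no_port_binding_failure frame at nova/network/neutron.py:294) and oslo_utils.excutils.save_and_reraise_exception(), which re-raises the original error after cleanup (the force_reraise() / raise self.value frames). The sketch below is illustrative only and is not the Nova source: the PortBindingFailed stand-in class, the allocate_ports() helper, and the assumption that neutron signals a failed binding by setting binding:vif_type to 'binding_failed' are added here for clarity, not taken from this log.

```python
# Illustrative sketch of the failure pattern seen in the tracebacks above.
# Not the verbatim Nova code: PortBindingFailed is a stand-in for
# nova.exception.PortBindingFailed, allocate_ports() is a hypothetical
# helper, and the 'binding:vif_type' == 'binding_failed' test is an
# assumption about how neutron reports a failed binding.
from oslo_utils import excutils


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Raise as soon as neutron hands back a port whose binding failed,
    # mirroring the nova/network/neutron.py:294 frame in the tracebacks.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def allocate_ports(neutron, device_id, port_ids):
    """Bind ports to an instance, undoing partial work on failure."""
    bound = []
    try:
        for port_id in port_ids:
            port = neutron.update_port(
                port_id, {'port': {'device_id': device_id}})['port']
            _ensure_no_port_binding_failure(port)
            bound.append(port['id'])
        return bound
    except Exception:
        # save_and_reraise_exception() captures the active exception on
        # entry, lets the cleanup below run, then re-raises the original
        # error on exit (the force_reraise() / raise self.value frames in
        # the log), so the caller still observes PortBindingFailed.
        with excutils.save_and_reraise_exception():
            for port_id in bound:
                neutron.update_port(port_id, {'port': {'device_id': ''}})
```

In the log, that re-raised exception is what the compute manager catches before it aborts the resource claim, deallocates the network, deletes the placement allocations, and re-schedules the instance, as the surrounding records for b16d0a82-271e-4e37-bfcd-49c3749d16ac show.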
[ 726.484372] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 15a78ac4486349449f088bacc746d3b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.535560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15a78ac4486349449f088bacc746d3b3 [ 726.538911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 17b3418452704572912704324b89115b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.574995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17b3418452704572912704324b89115b [ 726.608565] env[61594]: INFO nova.scheduler.client.report [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Deleted allocations for instance b16d0a82-271e-4e37-bfcd-49c3749d16ac [ 726.616218] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 36e28f21f46e4e1b90a9b1c01de5a4e5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 726.632447] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36e28f21f46e4e1b90a9b1c01de5a4e5 [ 726.633638] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3adae638-c63b-4063-983a-6aa05bac6f52 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "b16d0a82-271e-4e37-bfcd-49c3749d16ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.487s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.188311] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Successfully created port: fc83359a-f7b2-4614-bff2-cd718a34a194 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 729.599298] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "578faa1c-9edd-4ce3-8a5d-add49367d390" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.599660] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "578faa1c-9edd-4ce3-8a5d-add49367d390" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.599999] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b 
tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 4317b5be63124c5aacdbce38ed250362 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.626157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4317b5be63124c5aacdbce38ed250362 [ 729.626767] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 729.628645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 3ac5a09fd6da47eaa1e068bb2d2794f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.663285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ac5a09fd6da47eaa1e068bb2d2794f4 [ 729.686419] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 729.686673] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 729.688355] env[61594]: INFO nova.compute.claims [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 729.689914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg fbba34bff2a54b9c9285d94b6ba0bae4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.730100] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fbba34bff2a54b9c9285d94b6ba0bae4 [ 729.732039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 8ec07778df1b43a0891f841132b2e507 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.740243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8ec07778df1b43a0891f841132b2e507 [ 729.828589] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110cfada-433e-4176-adac-ed4d23a233f7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.837746] env[61594]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66609325-213b-4eac-9b65-c55dce0dac0d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.875974] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb9e4d2-7a80-4f6a-8d44-6da0950bed3b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.884306] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b403823-9869-4275-bb18-9728731b013c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.899610] env[61594]: DEBUG nova.compute.provider_tree [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.900253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg cac1ac0aa0a040dcaf268306fc807663 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.910966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cac1ac0aa0a040dcaf268306fc807663 [ 729.912638] env[61594]: DEBUG nova.scheduler.client.report [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 729.916545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 559d23c1e4b948cdb0702c3dcad7aca7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.934404] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 559d23c1e4b948cdb0702c3dcad7aca7 [ 729.935293] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 729.935898] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 729.937651] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 5cb88cb3b2254c66b9528efcf8c16f4f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.987255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cb88cb3b2254c66b9528efcf8c16f4f [ 729.987255] env[61594]: DEBUG nova.compute.utils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 729.987255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 3c9c64e407d849e49d50a0a0cca1e27b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 729.989677] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 729.991532] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 730.003055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c9c64e407d849e49d50a0a0cca1e27b [ 730.004352] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 730.005369] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 795c2950d61d475baa2b7a46bc1a378d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 730.047912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 795c2950d61d475baa2b7a46bc1a378d [ 730.051891] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg d781c53fe72b47e8adaf6864268ec456 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 730.088409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d781c53fe72b47e8adaf6864268ec456 [ 730.090388] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 730.122887] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 730.123160] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 730.123324] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 730.123515] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 730.123663] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 
tempest-ListImageFiltersTestJSON-328745172-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 730.123805] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 730.124021] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 730.124214] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 730.124381] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 730.124542] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 730.124711] env[61594]: DEBUG nova.virt.hardware [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 730.125882] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff0327b-191b-4f9f-a448-54dc4782fb00 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.132681] env[61594]: ERROR nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. 
[ 730.132681] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 730.132681] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 730.132681] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 730.132681] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.132681] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.132681] env[61594]: ERROR nova.compute.manager raise self.value [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 730.132681] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 730.132681] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.132681] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 730.136272] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.136272] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 730.136272] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. 
[ 730.136272] env[61594]: ERROR nova.compute.manager [ 730.136272] env[61594]: Traceback (most recent call last): [ 730.136272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 730.136272] env[61594]: listener.cb(fileno) [ 730.136272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 730.136272] env[61594]: result = function(*args, **kwargs) [ 730.136272] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 730.136272] env[61594]: return func(*args, **kwargs) [ 730.136272] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 730.136272] env[61594]: raise e [ 730.136272] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 730.136272] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 730.136272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 730.136272] env[61594]: created_port_ids = self._update_ports_for_instance( [ 730.136272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 730.136272] env[61594]: with excutils.save_and_reraise_exception(): [ 730.136272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.136272] env[61594]: self.force_reraise() [ 730.136272] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.136272] env[61594]: raise self.value [ 730.136272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 730.136272] env[61594]: updated_port = self._update_port( [ 730.136272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.136272] env[61594]: _ensure_no_port_binding_failure(port) [ 730.136272] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.136272] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 730.136988] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. [ 730.136988] env[61594]: Removing descriptor: 24 [ 730.136988] env[61594]: ERROR nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. 
[ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Traceback (most recent call last): [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] yield resources [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.driver.spawn(context, instance, image_meta, [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.136988] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] vm_ref = self.build_virtual_machine(instance, [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] for vif in network_info: [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self._sync_wrapper(fn, *args, **kwargs) [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.wait() [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self[:] = self._gt.wait() [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self._exit_event.wait() [ 730.137430] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 730.137938] env[61594]: ERROR 
nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] result = hub.switch() [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self.greenlet.switch() [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] result = function(*args, **kwargs) [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return func(*args, **kwargs) [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise e [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] nwinfo = self.network_api.allocate_for_instance( [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 730.137938] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] created_port_ids = self._update_ports_for_instance( [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] with excutils.save_and_reraise_exception(): [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.force_reraise() [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise self.value [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] updated_port = self._update_port( [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.138249] 
env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] _ensure_no_port_binding_failure(port) [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 730.138249] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise exception.PortBindingFailed(port_id=port['id']) [ 730.138574] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. [ 730.138574] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] [ 730.138574] env[61594]: INFO nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Terminating instance [ 730.140113] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ce8177-41ca-4651-942c-851bec5a2e55 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.146117] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquiring lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.146117] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquired lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.146117] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.146399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 67ad861529fb43aaad5dcff59e3d0cc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 730.161214] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67ad861529fb43aaad5dcff59e3d0cc4 [ 730.284665] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.565933] env[61594]: DEBUG nova.policy [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6189f904167c4cfa9fe287573713f17b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7097bed89f7c4ec699866cb25bf49e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 731.363675] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.363675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg a77a1d344d384ccf85c63a1e4727cc54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.376067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a77a1d344d384ccf85c63a1e4727cc54 [ 731.376744] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Releasing lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.380019] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 731.380019] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 731.380019] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6044119c-7900-4108-ab62-c0f0f03f1237 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.391828] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609223a6-6782-41a2-bccd-c5690ed67055 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.421942] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eb4ab6f2-3815-4bf1-a561-79bcdb74380c could not be found. [ 731.422042] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 731.424654] env[61594]: INFO nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 731.424654] env[61594]: DEBUG oslo.service.loopingcall [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 731.424654] env[61594]: DEBUG nova.compute.manager [-] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 731.424654] env[61594]: DEBUG nova.network.neutron [-] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.546554] env[61594]: DEBUG nova.network.neutron [-] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.548113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8bb9808891a54278b07d0c883ce706a8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.561996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bb9808891a54278b07d0c883ce706a8 [ 731.563055] env[61594]: DEBUG nova.network.neutron [-] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.563055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 55dc8c507a8340b4974dda43b2ee74b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.573607] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55dc8c507a8340b4974dda43b2ee74b1 [ 731.574326] env[61594]: INFO nova.compute.manager [-] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Took 0.15 seconds to deallocate network for instance. [ 731.580366] env[61594]: DEBUG nova.compute.claims [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 731.580556] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 731.580806] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 731.582929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg cdd56d400b9c4819b81afa2f1fb928b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.650820] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdd56d400b9c4819b81afa2f1fb928b4 [ 731.768431] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0636c8c9-589c-4f14-97a5-c894e7bdd8df {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.779898] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8c0f00-c5c7-4116-bc06-71fd6efc5161 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.832021] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ee1dd5-2838-4fea-9068-aa2d3b0c5187 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.842026] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b080a274-5a1e-4dc8-b77e-55c97fa6ded9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.852946] env[61594]: DEBUG nova.compute.provider_tree [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 731.853539] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 4fec539af3f64fa985291d30b7bcddb7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.867713] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fec539af3f64fa985291d30b7bcddb7 [ 731.868831] env[61594]: DEBUG nova.scheduler.client.report [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 731.871227] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 4d7ce768a46547b59c109d9df1c699aa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.893793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d7ce768a46547b59c109d9df1c699aa [ 731.894736] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.314s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 731.895571] env[61594]: ERROR nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. 
[ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Traceback (most recent call last): [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.driver.spawn(context, instance, image_meta, [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] vm_ref = self.build_virtual_machine(instance, [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] vif_infos = vmwarevif.get_vif_info(self._session, [ 731.895571] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] for vif in network_info: [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self._sync_wrapper(fn, *args, **kwargs) [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.wait() [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self[:] = self._gt.wait() [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self._exit_event.wait() [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] result = hub.switch() [ 731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
731.895914] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return self.greenlet.switch() [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] result = function(*args, **kwargs) [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] return func(*args, **kwargs) [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise e [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] nwinfo = self.network_api.allocate_for_instance( [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] created_port_ids = self._update_ports_for_instance( [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] with excutils.save_and_reraise_exception(): [ 731.896315] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] self.force_reraise() [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise self.value [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] updated_port = self._update_port( [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] _ensure_no_port_binding_failure(port) [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] raise exception.PortBindingFailed(port_id=port['id']) [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] nova.exception.PortBindingFailed: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. [ 731.896613] env[61594]: ERROR nova.compute.manager [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] [ 731.896864] env[61594]: DEBUG nova.compute.utils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 731.898988] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Build of instance eb4ab6f2-3815-4bf1-a561-79bcdb74380c was re-scheduled: Binding failed for port 9d511783-4632-4be0-822a-27f7347d9f78, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 731.899439] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 731.899668] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquiring lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 731.899816] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Acquired lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.899980] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 731.900458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg d0bc925a8dbb46f586eeffa4b269b54b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 731.909222] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0bc925a8dbb46f586eeffa4b269b54b [ 732.113093] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "e5d9ba62-f701-4c6a-8dbe-1bd401db3343" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.113334] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "e5d9ba62-f701-4c6a-8dbe-1bd401db3343" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.113883] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg fe9dacaa0d7e457eb06258053e5cdf2d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.125984] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe9dacaa0d7e457eb06258053e5cdf2d [ 732.126514] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 732.128217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 637cbfd6d0164f6c8446cc4bed4059bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.164966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 637cbfd6d0164f6c8446cc4bed4059bf [ 732.187424] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 732.187682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 732.189177] env[61594]: INFO nova.compute.claims [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 732.191136] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 099deabb538448e889d92bfe8a8de39e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.229571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
099deabb538448e889d92bfe8a8de39e [ 732.231702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 152738c62b5b4b06898fa0eed983396e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.240938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 152738c62b5b4b06898fa0eed983396e [ 732.258602] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 732.353732] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7addc35e-e658-44ca-a3b9-051e7116fb03 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.362260] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62502af0-f393-4a03-9287-b364b9844486 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.397949] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9120e986-6397-41cf-ba57-14d1048706ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.405774] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80dcd25-bba8-4bff-a5b7-a4b249ca1cdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.420525] env[61594]: DEBUG nova.compute.provider_tree [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 732.421091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 34d765489d3f4d99b72fdf806b024798 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.430594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34d765489d3f4d99b72fdf806b024798 [ 732.431603] env[61594]: DEBUG nova.scheduler.client.report [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 732.434183] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 0b43d9fddc9042c885374a3ea6a42222 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.456783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b43d9fddc9042c885374a3ea6a42222 [ 732.457460] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.270s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 732.457949] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 732.460294] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 5e192c85213149688347cafee72e29e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.511935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e192c85213149688347cafee72e29e9 [ 732.513379] env[61594]: DEBUG nova.compute.utils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 732.513964] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 14946460739f42e39a523de618d83550 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.515507] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 732.515677] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 732.529654] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14946460739f42e39a523de618d83550 [ 732.530355] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 732.533131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 0eb639b75ec441ff80bdd9ac6ab9c377 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.571049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0eb639b75ec441ff80bdd9ac6ab9c377 [ 732.574373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 8e3bd7d163974eadbf451e7f06a0b34e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.614745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e3bd7d163974eadbf451e7f06a0b34e [ 732.615909] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 732.649076] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 732.649279] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 732.649443] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 732.649630] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 732.649813] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 732.649934] env[61594]: DEBUG 
nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 732.650287] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 732.650459] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 732.650622] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 732.650999] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 732.651478] env[61594]: DEBUG nova.virt.hardware [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 732.652662] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d429dade-1c29-4d19-9a04-1afd8ecd28cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.663843] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0ce45f-1a6f-4621-b84b-3beaada0a37e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.949319] env[61594]: DEBUG nova.policy [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ce2159a0ccb46c89a7574d04142e926', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbc2efe50ed4ae5a5f0cf6f492a20bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 732.979959] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Updating instance_info_cache with 
network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.980657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg ee89043f118b4384ae6bc5cbd242804a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 732.997380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee89043f118b4384ae6bc5cbd242804a [ 732.998524] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Releasing lock "refresh_cache-eb4ab6f2-3815-4bf1-a561-79bcdb74380c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 732.998882] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 732.999390] env[61594]: DEBUG nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 732.999605] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 733.076261] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.078603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg eaff7167d7b34072994ae18b874ad24e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.088986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaff7167d7b34072994ae18b874ad24e [ 733.089756] env[61594]: DEBUG nova.network.neutron [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.090325] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 68ff2bc58d074ce0a7abeffdfa026bb5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.102878] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68ff2bc58d074ce0a7abeffdfa026bb5 [ 733.103829] env[61594]: INFO nova.compute.manager [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] [instance: eb4ab6f2-3815-4bf1-a561-79bcdb74380c] Took 0.10 seconds to deallocate network for instance. [ 733.105210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 11ed49b8b4d04e60afd0744b2163deeb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.152694] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11ed49b8b4d04e60afd0744b2163deeb [ 733.155809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg 08244d9b758d41f5b42131e70260cb80 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.198637] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08244d9b758d41f5b42131e70260cb80 [ 733.232451] env[61594]: INFO nova.scheduler.client.report [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Deleted allocations for instance eb4ab6f2-3815-4bf1-a561-79bcdb74380c [ 733.244500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Expecting reply to msg b19529f131a04cae85cea5e97ff3d0ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.263589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b19529f131a04cae85cea5e97ff3d0ff [ 733.263589] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0d1a2f85-ac2e-4917-be3f-f8e547a4224d tempest-InstanceActionsTestJSON-1806126258 tempest-InstanceActionsTestJSON-1806126258-project-member] Lock 
"eb4ab6f2-3815-4bf1-a561-79bcdb74380c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.551s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.421418] env[61594]: ERROR nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. [ 733.421418] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 733.421418] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 733.421418] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 733.421418] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.421418] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.421418] env[61594]: ERROR nova.compute.manager raise self.value [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 733.421418] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 733.421418] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.421418] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 733.422270] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.422270] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 733.422270] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. 
[ 733.422270] env[61594]: ERROR nova.compute.manager [ 733.422270] env[61594]: Traceback (most recent call last): [ 733.422270] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 733.422270] env[61594]: listener.cb(fileno) [ 733.422270] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 733.422270] env[61594]: result = function(*args, **kwargs) [ 733.422270] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.422270] env[61594]: return func(*args, **kwargs) [ 733.422270] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 733.422270] env[61594]: raise e [ 733.422270] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 733.422270] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 733.422270] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 733.422270] env[61594]: created_port_ids = self._update_ports_for_instance( [ 733.422270] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 733.422270] env[61594]: with excutils.save_and_reraise_exception(): [ 733.422270] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.422270] env[61594]: self.force_reraise() [ 733.422270] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.422270] env[61594]: raise self.value [ 733.422270] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 733.422270] env[61594]: updated_port = self._update_port( [ 733.422270] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.422270] env[61594]: _ensure_no_port_binding_failure(port) [ 733.422270] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.422270] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 733.423010] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. [ 733.423010] env[61594]: Removing descriptor: 22 [ 733.423010] env[61594]: ERROR nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. 
[ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Traceback (most recent call last): [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] yield resources [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.driver.spawn(context, instance, image_meta, [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.423010] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] vm_ref = self.build_virtual_machine(instance, [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] for vif in network_info: [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self._sync_wrapper(fn, *args, **kwargs) [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.wait() [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self[:] = self._gt.wait() [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self._exit_event.wait() [ 733.423936] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.424508] env[61594]: ERROR 
nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] result = hub.switch() [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self.greenlet.switch() [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] result = function(*args, **kwargs) [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return func(*args, **kwargs) [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise e [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] nwinfo = self.network_api.allocate_for_instance( [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 733.424508] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] created_port_ids = self._update_ports_for_instance( [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] with excutils.save_and_reraise_exception(): [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.force_reraise() [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise self.value [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] updated_port = self._update_port( [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.425392] 
env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] _ensure_no_port_binding_failure(port) [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.425392] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise exception.PortBindingFailed(port_id=port['id']) [ 733.426854] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. [ 733.426854] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] [ 733.426854] env[61594]: INFO nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Terminating instance [ 733.426854] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.426854] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.426854] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.427017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d1b06bf1192541b0b2a2e8931092a520 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 733.440065] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1b06bf1192541b0b2a2e8931092a520 [ 733.497400] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.042748] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Successfully created port: b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 734.116666] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.117266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg bc41552234424cc185957a07349e31c1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.128483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc41552234424cc185957a07349e31c1 [ 734.129617] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.130206] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 734.130450] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 734.131045] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c833cdf0-e9e8-4873-afc7-f74a7d53af85 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.143030] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e257ce59-6315-452c-bfd9-ecf65efbe862 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.172235] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b465e4bb-f9f4-4fcf-97e7-ad38f33918a4 could not be found. 
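The WARNING just above shows the teardown path tolerating a VM that never reached the backend: the build failed before anything was created in vCenter, so the InstanceNotFound from the lookup is downgraded to a warning and cleanup proceeds as if the destroy had succeeded. A rough sketch of that pattern, with hypothetical lookup/destroy callables standing in for the vmwareapi session calls:

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(lookup_vm, destroy_vm, instance_uuid):
        """Destroy the backend VM, tolerating a VM that was never created."""
        try:
            vm_ref = lookup_vm(instance_uuid)
        except InstanceNotFound:
            # Spawn failed before the VM existed on the backend; there is
            # nothing to delete, so warn and let the rest of cleanup continue.
            print("WARNING: instance %s does not exist on backend" % instance_uuid)
            return
        destroy_vm(vm_ref)

    # Example mirroring the log: the lookup finds nothing for the failed build.
    def lookup_vm(uuid):
        raise InstanceNotFound(uuid)

    destroy_instance(lookup_vm, lambda ref: None,
                     'b465e4bb-f9f4-4fcf-97e7-ad38f33918a4')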
[ 734.172235] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 734.172235] env[61594]: INFO nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 734.172235] env[61594]: DEBUG oslo.service.loopingcall [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 734.172357] env[61594]: DEBUG nova.compute.manager [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 734.172470] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.267023] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.268599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 38586864bca04bac8b72723312653e18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.278761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38586864bca04bac8b72723312653e18 [ 734.280132] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.280132] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 213b5681c8904f7e9bbdba22486a2a0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.291039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 213b5681c8904f7e9bbdba22486a2a0c [ 734.292443] env[61594]: INFO nova.compute.manager [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Took 0.12 seconds to deallocate network for instance. 
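The inventory payload repeated in the scheduler reports above is easiest to read as capacity arithmetic: Placement treats (total - reserved) * allocation_ratio as the schedulable amount of each resource class, with max_unit capping what a single allocation may take. A small worked sketch, using the values logged for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be, under that standard interpretation:

    inventory = {
        'VCPU':      {'total': 48, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400, 'reserved': 0, 'min_unit': 1,
                      'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Usable capacity after reservation, scaled by the overcommit ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={capacity:.0f}, per-instance cap={inv['max_unit']}")

    # VCPU: schedulable=192, per-instance cap=16
    # MEMORY_MB: schedulable=196078, per-instance cap=65530
    # DISK_GB: schedulable=400, per-instance cap=139

So although the node reports 48 VCPU in total, the 4.0 allocation ratio lets Placement schedule up to 192, while memory and disk are not overcommitted.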
[ 734.295562] env[61594]: DEBUG nova.compute.claims [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 734.295754] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.295972] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 734.297887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 020b7a50c8394347987e12ea31e91d37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.352347] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 020b7a50c8394347987e12ea31e91d37 [ 734.463820] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7839de12-4ed5-4d16-a0dd-c4bf879af531 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.472575] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265931c9-a60a-4372-ae1d-664d67653484 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.506236] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f04616-7780-4e88-abca-35cfe941ff1f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.515968] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0964fd09-ef85-4c4b-bc27-4e125f7f1101 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.531006] env[61594]: DEBUG nova.compute.provider_tree [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.531581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 63daf618a06b43ebb1b92af8d08ddd6b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.540539] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63daf618a06b43ebb1b92af8d08ddd6b [ 734.541357] env[61594]: DEBUG nova.scheduler.client.report [None 
req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 734.543690] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg dc1c77336c314ac2bca58e31ee95f422 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.560801] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc1c77336c314ac2bca58e31ee95f422 [ 734.561546] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.265s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.562375] env[61594]: ERROR nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. 
[ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Traceback (most recent call last): [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.driver.spawn(context, instance, image_meta, [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] vm_ref = self.build_virtual_machine(instance, [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] vif_infos = vmwarevif.get_vif_info(self._session, [ 734.562375] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] for vif in network_info: [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self._sync_wrapper(fn, *args, **kwargs) [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.wait() [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self[:] = self._gt.wait() [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self._exit_event.wait() [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] result = hub.switch() [ 734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
734.562916] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return self.greenlet.switch() [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] result = function(*args, **kwargs) [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] return func(*args, **kwargs) [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise e [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] nwinfo = self.network_api.allocate_for_instance( [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] created_port_ids = self._update_ports_for_instance( [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] with excutils.save_and_reraise_exception(): [ 734.563372] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] self.force_reraise() [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise self.value [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] updated_port = self._update_port( [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] _ensure_no_port_binding_failure(port) [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] raise exception.PortBindingFailed(port_id=port['id']) [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] nova.exception.PortBindingFailed: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. [ 734.563692] env[61594]: ERROR nova.compute.manager [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] [ 734.564014] env[61594]: DEBUG nova.compute.utils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 734.564627] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Build of instance b465e4bb-f9f4-4fcf-97e7-ad38f33918a4 was re-scheduled: Binding failed for port 66c001d7-317a-4fa7-9e60-ec7751d386f5, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 734.565092] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 734.565304] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 734.565453] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.565612] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 734.566024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 3389f9684fd54089a65f345141673494 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 734.577042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3389f9684fd54089a65f345141673494 [ 734.885900] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 
b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 735.318657] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "cabdd4fe-9515-45f0-b596-862986f5733e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.319157] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "cabdd4fe-9515-45f0-b596-862986f5733e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.319609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg ee30d133c3224cd79f47bc5c317986bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.335418] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee30d133c3224cd79f47bc5c317986bd [ 735.335954] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 735.337702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 7b3e76f7e4484421b9a055d066d4164f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.406837] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b3e76f7e4484421b9a055d066d4164f [ 735.432418] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.433046] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.434455] env[61594]: INFO nova.compute.claims [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 735.436326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg a5b7fc17921a4bd6998428a8ec87331c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.492513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5b7fc17921a4bd6998428a8ec87331c [ 735.494172] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 9179c18e89844b8397df034b670c0601 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.505609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9179c18e89844b8397df034b670c0601 [ 735.619454] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cada1814-b873-4268-8168-0b03b1cbb51c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.628297] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e977a518-017f-401c-a674-6630d99ee252 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.661722] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40ed096-482c-4d5d-8b6d-c645d6deea7f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.670190] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ff7da4de-3389-42ab-bce7-7ba16a01327d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.684544] env[61594]: DEBUG nova.compute.provider_tree [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 735.685079] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 64f9fb67a144496e8db87d5800fe8122 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.696414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64f9fb67a144496e8db87d5800fe8122 [ 735.697337] env[61594]: DEBUG nova.scheduler.client.report [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 735.699738] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg b7107362cd3b449f978a7433da6c3f5d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.718021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7107362cd3b449f978a7433da6c3f5d [ 735.718021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.283s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.718021] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 735.718560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 07d1640d8e8543069b2003566f2587cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.772057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07d1640d8e8543069b2003566f2587cd [ 735.773483] env[61594]: DEBUG nova.compute.utils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.774088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg bf9a2a94a0ce48b1926925f755d300dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.774920] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 735.775126] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 735.785935] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.786459] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg eb8fee991b6d4b3d9995aa0e5e7dc8cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.787879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf9a2a94a0ce48b1926925f755d300dd [ 735.788732] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 735.790127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 048c8a7f9a5b4805879c951ad9850de3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.801978] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb8fee991b6d4b3d9995aa0e5e7dc8cc [ 735.802953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 735.803217] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 735.807019] env[61594]: DEBUG nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 735.807019] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 735.832642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 048c8a7f9a5b4805879c951ad9850de3 [ 735.835979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 29568a7c4ae24a16822b2b1a294cfa3d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.879466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29568a7c4ae24a16822b2b1a294cfa3d [ 735.879466] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 735.919092] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 735.919261] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 735.919487] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 735.919628] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 735.919778] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 735.920028] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 735.920189] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 735.920861] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 735.920861] env[61594]: DEBUG 
nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 735.920861] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 735.920861] env[61594]: DEBUG nova.virt.hardware [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 735.922626] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80fe2a2-da7b-4cde-a9d1-5a5a9f824a7d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.925778] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 735.926866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 70a967b079dc4a66a3c108207e11f83b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.933722] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bafe8b-0d5b-4599-b979-8a68664bed02 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.939395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70a967b079dc4a66a3c108207e11f83b [ 735.940086] env[61594]: DEBUG nova.network.neutron [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.940661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a84731253f8c49a481d7d5926892d6ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.953352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a84731253f8c49a481d7d5926892d6ef [ 735.953352] env[61594]: INFO nova.compute.manager [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Took 0.15 seconds to deallocate network for instance. 
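The nova.virt.hardware records above walk through the CPU topology search for the m1.nano flavor: with limits of 65536 sockets, cores, and threads and a single vCPU, the only possible topology is 1:1:1. The sketch below is a simplified, self-contained version of that enumeration, not the nova.virt.hardware implementation; it just lists the (sockets, cores, threads) factorizations of the vCPU count that fit the limits.

    # Simplified illustration of the "possible topologies" search logged above.
    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """List (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(Topology(sockets, cores, threads))
        return found

    # Matches the log: one vCPU yields a single 1:1:1 topology.
    print(possible_topologies(1))     # [Topology(sockets=1, cores=1, threads=1)]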
[ 735.954556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a70fd8edb8c44338bf9d593e4e12e093 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 735.996025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a70fd8edb8c44338bf9d593e4e12e093 [ 735.996025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 64d76642fb71452da15c321844136ec4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 736.038072] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d76642fb71452da15c321844136ec4 [ 736.063924] env[61594]: INFO nova.scheduler.client.report [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance b465e4bb-f9f4-4fcf-97e7-ad38f33918a4 [ 736.072173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f4700feb1394487780962290d8f5c425 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 736.093738] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f4700feb1394487780962290d8f5c425 [ 736.096035] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a883ff76-8a52-41f9-815d-cb464c39e058 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.856s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.096035] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 13.332s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.096035] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.096035] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 736.096288] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.097674] env[61594]: INFO nova.compute.manager [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Terminating instance [ 736.099297] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.099456] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.099687] env[61594]: DEBUG nova.network.neutron [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.100139] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg b97a63ba1e80452c80e443d81a04752b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 736.114619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b97a63ba1e80452c80e443d81a04752b [ 736.196357] env[61594]: ERROR nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. 
[ 736.196357] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 736.196357] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.196357] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.196357] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.196357] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.196357] env[61594]: ERROR nova.compute.manager raise self.value [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.196357] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 736.196357] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.196357] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 736.197192] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.197192] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 736.197192] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. 
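Both tracebacks end in _ensure_no_port_binding_failure() raising PortBindingFailed after Neutron hands back a port whose binding failed. A minimal sketch of that style of check follows; it assumes the Neutron port dict exposes 'binding:vif_type' with the value 'binding_failed' on failure, and the exception class here is a local stand-in rather than nova.exception.PortBindingFailed.

    # Minimal sketch of the binding check the tracebacks point at (assumptions:
    # the port dict carries 'binding:vif_type', and 'binding_failed' marks failure).
    class PortBindingFailed(Exception):           # stand-in for nova.exception.PortBindingFailed
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for more "
                "information." % port_id)

    def ensure_no_port_binding_failure(port):
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    # A successfully bound port passes the check without raising.
    ensure_no_port_binding_failure({"id": "66c001d7-317a-4fa7-9e60-ec7751d386f5",
                                    "binding:vif_type": "ovs"})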
[ 736.197192] env[61594]: ERROR nova.compute.manager [ 736.197192] env[61594]: Traceback (most recent call last): [ 736.197192] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 736.197192] env[61594]: listener.cb(fileno) [ 736.197192] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 736.197192] env[61594]: result = function(*args, **kwargs) [ 736.197192] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.197192] env[61594]: return func(*args, **kwargs) [ 736.197192] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 736.197192] env[61594]: raise e [ 736.197192] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 736.197192] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 736.197192] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.197192] env[61594]: created_port_ids = self._update_ports_for_instance( [ 736.197192] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.197192] env[61594]: with excutils.save_and_reraise_exception(): [ 736.197192] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.197192] env[61594]: self.force_reraise() [ 736.197192] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.197192] env[61594]: raise self.value [ 736.197192] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.197192] env[61594]: updated_port = self._update_port( [ 736.197192] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.197192] env[61594]: _ensure_no_port_binding_failure(port) [ 736.197192] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.197192] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 736.198727] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. [ 736.198727] env[61594]: Removing descriptor: 19 [ 736.198727] env[61594]: ERROR nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. 
[ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Traceback (most recent call last): [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] yield resources [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.driver.spawn(context, instance, image_meta, [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 736.198727] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] vm_ref = self.build_virtual_machine(instance, [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] vif_infos = vmwarevif.get_vif_info(self._session, [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] for vif in network_info: [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self._sync_wrapper(fn, *args, **kwargs) [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.wait() [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self[:] = self._gt.wait() [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self._exit_event.wait() [ 736.199242] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 736.199874] env[61594]: ERROR 
nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] result = hub.switch() [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self.greenlet.switch() [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] result = function(*args, **kwargs) [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return func(*args, **kwargs) [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise e [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] nwinfo = self.network_api.allocate_for_instance( [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 736.199874] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] created_port_ids = self._update_ports_for_instance( [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] with excutils.save_and_reraise_exception(): [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.force_reraise() [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise self.value [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] updated_port = self._update_port( [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 736.201580] 
env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] _ensure_no_port_binding_failure(port) [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 736.201580] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise exception.PortBindingFailed(port_id=port['id']) [ 736.201873] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. [ 736.201873] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] [ 736.201873] env[61594]: INFO nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Terminating instance [ 736.201873] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquiring lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.201873] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquired lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.201873] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 736.202111] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 30bbcaa56568484aa938234d42307bfa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 736.214444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30bbcaa56568484aa938234d42307bfa [ 736.240927] env[61594]: DEBUG nova.network.neutron [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.352850] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 736.410931] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Successfully created port: 8cd305c7-8efe-43f6-b7a1-97c329a3b663 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.539651] env[61594]: DEBUG nova.policy [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6189f904167c4cfa9fe287573713f17b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7097bed89f7c4ec699866cb25bf49e47', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 737.096168] env[61594]: DEBUG nova.network.neutron [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.096705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 892107354c8e4523ac49108935b54cb5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.107379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 892107354c8e4523ac49108935b54cb5 [ 737.108093] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.108493] env[61594]: DEBUG nova.compute.manager [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 737.108688] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.109251] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed5026aa-d04b-482b-871b-28e996d9971e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.120623] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aee73c9-bb7c-493e-a938-36ae6c07f203 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.143357] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b465e4bb-f9f4-4fcf-97e7-ad38f33918a4 could not be found. [ 737.144184] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 737.144385] env[61594]: INFO nova.compute.manager [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 737.144643] env[61594]: DEBUG oslo.service.loopingcall [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.145029] env[61594]: DEBUG nova.compute.manager [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 737.145136] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 737.420739] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.421380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 359ddd222a3f4b01b9d6e6d8d7e668f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.434427] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 359ddd222a3f4b01b9d6e6d8d7e668f1 [ 737.435073] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Releasing lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 737.435874] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 737.435874] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 737.436695] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2f83ac2-a4b1-4fcc-805b-a9c9f37e6ad6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.447331] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0bef047-9ff0-468d-8ea8-4eb16fd75495 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.462247] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.465080] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg db38d5aa48e84d1a9c6ca6e0f52a1bc7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.477142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db38d5aa48e84d1a9c6ca6e0f52a1bc7 [ 737.477907] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fd8bf9dd-bed4-4f79-9d5b-80073b19649a could not be found. [ 737.478202] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 737.478583] env[61594]: INFO nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 737.478583] env[61594]: DEBUG oslo.service.loopingcall [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 737.479040] env[61594]: DEBUG nova.network.neutron [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.480500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c3389a6174644d9399e6d5b79e89975b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.484313] env[61594]: DEBUG nova.compute.manager [-] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 737.484609] env[61594]: DEBUG nova.network.neutron [-] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 737.493034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3389a6174644d9399e6d5b79e89975b [ 737.493849] env[61594]: INFO nova.compute.manager [-] [instance: b465e4bb-f9f4-4fcf-97e7-ad38f33918a4] Took 0.35 seconds to deallocate network for instance. 
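The PortBindingFailed tracebacks above all end in nova's port-binding sanity check, which raises as soon as Neutron reports a failed binding for the port being wired up. A minimal, self-contained sketch of that check follows; the 'binding_failed' vif_type trigger is an assumption about the Neutron port dict and is not shown in the log itself:

    class PortBindingFailed(Exception):
        # Mirrors the message format seen in the tracebacks above.
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port: dict) -> None:
        # Assumption: Neutron flags a failed binding via binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Worked example using the port id from the traceback above:
    port = {'id': '7cc4bed0-779d-4ad1-b83a-227f0c2229e4',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)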
[ 737.500840] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 0d19e1325483434c9a2c4c33bfc4504d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.564711] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d19e1325483434c9a2c4c33bfc4504d [ 737.583561] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4b73b9449d2d45c1b1be0643950a0cac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.591687] env[61594]: DEBUG nova.network.neutron [-] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.591687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f59dab8279864354a89016d2c58c66f0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.599587] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f59dab8279864354a89016d2c58c66f0 [ 737.600154] env[61594]: DEBUG nova.network.neutron [-] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.600666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4f91af05e79442d59e6768d47b3fb51d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.630272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f91af05e79442d59e6768d47b3fb51d [ 737.632739] env[61594]: INFO nova.compute.manager [-] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Took 0.15 seconds to deallocate network for instance. 
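The oslo_messaging.amqpdriver entries above show the RPC request/reply pattern these services use: each call is published to a topic queue, and the caller blocks on a shared per-process reply_<uuid> queue until the matching response arrives. A hedged sketch of one such round trip; the transport URL, topic, version, and remote method name are illustrative assumptions, not values taken from the log:

    from oslo_config import cfg
    import oslo_messaging as messaging

    # Build a transport and a client for a topic; the AMQP driver creates the
    # per-process reply_<uuid> queue lazily on the first call.
    transport = messaging.get_rpc_transport(
        cfg.CONF, url='rabbit://guest:guest@127.0.0.1:5672/')
    target = messaging.Target(topic='compute', version='6.0')
    client = messaging.RPCClient(transport, target)

    # call() publishes the request ("Expecting reply to msg ...") and blocks
    # until the response lands on the reply queue ("Received RPC response").
    result = client.call({}, 'get_host_uptime')
    print(result)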
[ 737.634399] env[61594]: DEBUG nova.compute.claims [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 737.635248] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.635248] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.636800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg c9c8a06d7b854971824d11a72ef96241 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.663555] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b73b9449d2d45c1b1be0643950a0cac [ 737.666805] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "b465e4bb-f9f4-4fcf-97e7-ad38f33918a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.572s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.667555] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d40e6f39-27e1-4c56-9f14-ce465f696548 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg b0aa7cf7812b425389a9b127e2365194 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.682426] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9c8a06d7b854971824d11a72ef96241 [ 737.687290] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0aa7cf7812b425389a9b127e2365194 [ 737.782908] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a98e7d-dc53-4afb-bde5-fdbf563b896f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.794418] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de15887-20fc-461d-85e1-512513c4d57e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.828992] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75232164-059c-4ddc-a0fa-c4bae0340bc9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.837630] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7aff88-0719-4695-8e34-e4a7c35e75d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.856660] env[61594]: DEBUG nova.compute.provider_tree [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 737.857358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg faffec3510974d4cbb3f1596b1cee5b2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.874100] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faffec3510974d4cbb3f1596b1cee5b2 [ 737.875652] env[61594]: DEBUG nova.scheduler.client.report [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 737.879081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 78127ba2049c4f53b5886299c1e70906 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.896134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78127ba2049c4f53b5886299c1e70906 [ 737.896353] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.261s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.896924] env[61594]: ERROR nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. 
[ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Traceback (most recent call last): [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.driver.spawn(context, instance, image_meta, [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] vm_ref = self.build_virtual_machine(instance, [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.896924] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] for vif in network_info: [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self._sync_wrapper(fn, *args, **kwargs) [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.wait() [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self[:] = self._gt.wait() [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self._exit_event.wait() [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] result = hub.switch() [ 737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
737.898740] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return self.greenlet.switch() [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] result = function(*args, **kwargs) [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] return func(*args, **kwargs) [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise e [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] nwinfo = self.network_api.allocate_for_instance( [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] created_port_ids = self._update_ports_for_instance( [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] with excutils.save_and_reraise_exception(): [ 737.899264] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] self.force_reraise() [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise self.value [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] updated_port = self._update_port( [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] _ensure_no_port_binding_failure(port) [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] raise exception.PortBindingFailed(port_id=port['id']) [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] nova.exception.PortBindingFailed: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. [ 737.899630] env[61594]: ERROR nova.compute.manager [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] [ 737.900085] env[61594]: DEBUG nova.compute.utils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 737.900085] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Build of instance fd8bf9dd-bed4-4f79-9d5b-80073b19649a was re-scheduled: Binding failed for port 7cc4bed0-779d-4ad1-b83a-227f0c2229e4, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 737.900085] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 737.900085] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquiring lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.900250] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Acquired lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.900250] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.901131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 1a39a849d1e04f4abe80d9560f067248 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 737.914225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a39a849d1e04f4abe80d9560f067248 [ 738.118121] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 
tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.250258] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.250492] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.252323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg ab52702bfd8645809ba5f63d8cfd4591 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.265983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab52702bfd8645809ba5f63d8cfd4591 [ 738.266683] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 738.268758] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 1e54b4c8dca443c2ba5bd8dc44467443 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.308930] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e54b4c8dca443c2ba5bd8dc44467443 [ 738.339630] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 738.339760] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 738.341516] env[61594]: INFO nova.compute.claims [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 738.343906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 5147bb75923e4e4483f7ddba43332897 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.399833] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5147bb75923e4e4483f7ddba43332897 [ 738.401879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg fd994b5f0ec34bb7939c37bde6659a18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.423783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd994b5f0ec34bb7939c37bde6659a18 [ 738.577293] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27bd6c3d-2212-48c0-98fd-8d923e7ec95a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.585254] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e47a45d-babb-46a0-964a-611ab3287b5f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.618259] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d317bc-5393-47bf-a754-77d5a7229086 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.626307] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-31201d13-e018-46bc-8b27-1939f9e0f4a8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.640895] env[61594]: DEBUG nova.compute.provider_tree [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.641784] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg ac13386272a74392b168aae3991e8efd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.651132] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac13386272a74392b168aae3991e8efd [ 738.652202] env[61594]: DEBUG nova.scheduler.client.report [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 738.654981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 30631cbd77d042349e486e121cf3be00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.663536] env[61594]: ERROR nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. 
[ 738.663536] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 738.663536] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.663536] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.663536] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.663536] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.663536] env[61594]: ERROR nova.compute.manager raise self.value [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.663536] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 738.663536] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.663536] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 738.664125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.664125] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 738.664125] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. 
[ 738.664125] env[61594]: ERROR nova.compute.manager [ 738.664125] env[61594]: Traceback (most recent call last): [ 738.664125] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 738.664125] env[61594]: listener.cb(fileno) [ 738.664125] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 738.664125] env[61594]: result = function(*args, **kwargs) [ 738.664125] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.664125] env[61594]: return func(*args, **kwargs) [ 738.664125] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 738.664125] env[61594]: raise e [ 738.664125] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 738.664125] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 738.664125] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.664125] env[61594]: created_port_ids = self._update_ports_for_instance( [ 738.664125] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.664125] env[61594]: with excutils.save_and_reraise_exception(): [ 738.664125] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.664125] env[61594]: self.force_reraise() [ 738.664125] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.664125] env[61594]: raise self.value [ 738.664125] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.664125] env[61594]: updated_port = self._update_port( [ 738.664125] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.664125] env[61594]: _ensure_no_port_binding_failure(port) [ 738.664125] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.664125] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 738.664936] env[61594]: nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. [ 738.664936] env[61594]: Removing descriptor: 21 [ 738.664936] env[61594]: ERROR nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. 
[ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Traceback (most recent call last): [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] yield resources [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.driver.spawn(context, instance, image_meta, [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 738.664936] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] vm_ref = self.build_virtual_machine(instance, [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] for vif in network_info: [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self._sync_wrapper(fn, *args, **kwargs) [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.wait() [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self[:] = self._gt.wait() [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self._exit_event.wait() [ 738.665300] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 738.665677] env[61594]: ERROR 
nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] result = hub.switch() [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self.greenlet.switch() [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] result = function(*args, **kwargs) [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return func(*args, **kwargs) [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise e [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] nwinfo = self.network_api.allocate_for_instance( [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 738.665677] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] created_port_ids = self._update_ports_for_instance( [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] with excutils.save_and_reraise_exception(): [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.force_reraise() [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise self.value [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] updated_port = self._update_port( [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 738.666060] 
env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] _ensure_no_port_binding_failure(port) [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 738.666060] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise exception.PortBindingFailed(port_id=port['id']) [ 738.666421] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. [ 738.666421] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] [ 738.666421] env[61594]: INFO nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Terminating instance [ 738.666748] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquiring lock "refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 738.666891] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquired lock "refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.667373] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.667851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 3a7da691b03d4c8285513c45e435e779 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.674660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30631cbd77d042349e486e121cf3be00 [ 738.674892] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 738.675508] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 738.678514] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 014413f1706a4b92bb31fc319040f8d0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.678987] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a7da691b03d4c8285513c45e435e779 [ 738.719765] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 014413f1706a4b92bb31fc319040f8d0 [ 738.721303] env[61594]: DEBUG nova.compute.utils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 738.721896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg a297eb7410324c9b9a41cfef6984b109 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.722825] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 738.726023] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 738.732995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a297eb7410324c9b9a41cfef6984b109 [ 738.734072] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 738.735893] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 92e17aba98234f52ba08869605f5c89c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.776153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92e17aba98234f52ba08869605f5c89c [ 738.779674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg a31f742f35064684a05dca8fcab1267a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 738.818181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a31f742f35064684a05dca8fcab1267a [ 738.819507] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 738.822829] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.854918] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 738.855063] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 738.855120] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 738.855338] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 
tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 738.855462] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 738.855663] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 738.855840] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 738.856013] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 738.856192] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 738.856370] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 738.856555] env[61594]: DEBUG nova.virt.hardware [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 738.857862] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d278d7e7-4f91-4131-b37e-9fcc7cffc66d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.867152] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d44516-dbd9-4c5e-9db8-1e0476c785fb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.253902] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Updating instance_info_cache with network_info: [] {{(pid=61594) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.254512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg f7752dc8646040cc90aba89f9cde4c68 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.263989] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7752dc8646040cc90aba89f9cde4c68 [ 739.264700] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Releasing lock "refresh_cache-fd8bf9dd-bed4-4f79-9d5b-80073b19649a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.264922] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 739.265222] env[61594]: DEBUG nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 739.265335] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.373070] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.373693] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 86a41815c3d24b26a2fc3b01880be808 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.379187] env[61594]: DEBUG nova.policy [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d6b88b1f4394a63b32be00fc9dcc761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57787a709d744ea4a19a2cfb923d89d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 739.398538] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86a41815c3d24b26a2fc3b01880be808 [ 739.399739] env[61594]: DEBUG nova.network.neutron [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.399739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 505cbcfcac9848a3a12ff1756fb2b31c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.411059] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 505cbcfcac9848a3a12ff1756fb2b31c [ 739.412248] env[61594]: INFO nova.compute.manager [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] [instance: fd8bf9dd-bed4-4f79-9d5b-80073b19649a] Took 0.15 seconds to deallocate network for instance. 
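
The policy check recorded just above ("Policy check for network:attach_external_network failed with credentials {...}") is an oslo.policy decision: the request context carries only the 'member' and 'reader' roles, so an admin-only rule evaluates to False and Nova simply records the failed check rather than attaching an external network. A minimal sketch of that style of check follows; it is not the Nova policy code itself, only the oslo.policy pattern it relies on, and the rule string 'is_admin:True', the project ID, and the credential dict are illustrative assumptions taken from the log line.

    # Sketch only: evaluate an admin-only policy rule against member/reader
    # credentials, the way nova.policy.authorize defers to an oslo.policy Enforcer.
    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project='sketch')  # no config files; in-code defaults only

    enforcer = policy.Enforcer(conf)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'is_admin:True',
                           description='Attach an external network (assumed admin-only).'))

    creds = {'roles': ['member', 'reader'], 'is_admin': False,
             'project_id': '57787a709d744ea4a19a2cfb923d89d4'}
    target = {'project_id': creds['project_id']}

    # do_raise=False returns a boolean instead of raising PolicyNotAuthorized,
    # matching the log, where the failed check is only recorded, not fatal.
    allowed = enforcer.enforce('network:attach_external_network', target, creds,
                               do_raise=False)
    print(allowed)  # False for a member/reader context
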
[ 739.414256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 934d51f9c6cc4f0b976e11b3e04b2413 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.476603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 934d51f9c6cc4f0b976e11b3e04b2413 [ 739.481445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg 6b5e4aecd56f4cd6ada43d4cd6e54ae6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.527661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b5e4aecd56f4cd6ada43d4cd6e54ae6 [ 739.577019] env[61594]: INFO nova.scheduler.client.report [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Deleted allocations for instance fd8bf9dd-bed4-4f79-9d5b-80073b19649a [ 739.581025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Expecting reply to msg ce8b912b0294466da517f990c09fe19d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.596696] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce8b912b0294466da517f990c09fe19d [ 739.598148] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4419b8b7-7473-4213-9100-092ec2267cc4 tempest-ImagesNegativeTestJSON-484615250 tempest-ImagesNegativeTestJSON-484615250-project-member] Lock "fd8bf9dd-bed4-4f79-9d5b-80073b19649a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.709s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.660444] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Successfully created port: 3499cf5d-3bf9-4c03-8f3d-25fd14830733 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 739.901179] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.901179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 755eabe83d844e8787d3ebf9c24c5c52 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 739.913638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 755eabe83d844e8787d3ebf9c24c5c52 [ 739.915459] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Releasing lock 
"refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.915459] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 739.915983] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 739.916619] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90579dc3-1a1d-421f-a540-89116e25c0e6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.929157] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681a4a9d-d17f-4fb7-8898-87877c34d768 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.953281] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 59555974-e1a8-467a-872e-5d0cd4daffdb could not be found. [ 739.953972] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 739.954187] env[61594]: INFO nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 739.954682] env[61594]: DEBUG oslo.service.loopingcall [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.955170] env[61594]: DEBUG nova.compute.manager [-] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 739.955481] env[61594]: DEBUG nova.network.neutron [-] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 740.071760] env[61594]: DEBUG nova.network.neutron [-] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.072323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg df9415c94ce641cc8229445ba6f149b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.084674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df9415c94ce641cc8229445ba6f149b8 [ 740.084911] env[61594]: DEBUG nova.network.neutron [-] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.085352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 48ac1903e9b64434b275100409f5ba78 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.096171] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48ac1903e9b64434b275100409f5ba78 [ 740.096649] env[61594]: INFO nova.compute.manager [-] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Took 0.14 seconds to deallocate network for instance. [ 740.099392] env[61594]: DEBUG nova.compute.claims [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 740.099571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.099789] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.102014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg b440a23479b0424682c7e8212cbb9aae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.156042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b440a23479b0424682c7e8212cbb9aae [ 740.263853] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e7f683-e431-4ca8-add4-55662652b059 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.271847] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e74982b-b255-47e4-823d-eeb4961595a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.304506] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70b1396-1dd9-4b3c-9c39-21da6a0fe422 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
740.312651] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebb8baf-5d56-4c66-9662-414e6bbf4cf9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.334502] env[61594]: DEBUG nova.compute.provider_tree [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.335080] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 00ffb629eb264ed1a6b71823685c4369 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.350800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00ffb629eb264ed1a6b71823685c4369 [ 740.352037] env[61594]: DEBUG nova.scheduler.client.report [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 740.354758] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg bb6519c2801142bfa5003070f28beb56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.368723] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb6519c2801142bfa5003070f28beb56 [ 740.369732] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.270s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 740.370410] env[61594]: ERROR nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. 
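
The PortBindingFailed error above, together with the traceback that follows, bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which inspects the port returned by Neutron and raises when the binding came back failed. A minimal sketch of that check is below; it assumes a plain dict shaped like a Neutron port and uses a stand-in exception class and constant rather than Nova's own exception and VIF-type definitions.

    # Sketch: reject a Neutron port whose binding failed, mirroring the check
    # named in the traceback below. 'binding_failed' is the vif_type Neutron
    # reports when port binding did not succeed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                'Binding failed for port %s, please check neutron logs '
                'for more information.' % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # In the log, the compute manager catches this during _allocate_network_async,
    # aborts the resource claim, and re-schedules the build of instance
    # 59555974-e1a8-467a-872e-5d0cd4daffdb.
    port = {'id': 'fc83359a-f7b2-4614-bff2-cd718a34a194',
            'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)
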
[ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Traceback (most recent call last): [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.driver.spawn(context, instance, image_meta, [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] vm_ref = self.build_virtual_machine(instance, [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] vif_infos = vmwarevif.get_vif_info(self._session, [ 740.370410] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] for vif in network_info: [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self._sync_wrapper(fn, *args, **kwargs) [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.wait() [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self[:] = self._gt.wait() [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self._exit_event.wait() [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] result = hub.switch() [ 740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
740.371228] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return self.greenlet.switch() [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] result = function(*args, **kwargs) [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] return func(*args, **kwargs) [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise e [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] nwinfo = self.network_api.allocate_for_instance( [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] created_port_ids = self._update_ports_for_instance( [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] with excutils.save_and_reraise_exception(): [ 740.371643] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] self.force_reraise() [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise self.value [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] updated_port = self._update_port( [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] _ensure_no_port_binding_failure(port) [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] raise exception.PortBindingFailed(port_id=port['id']) [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] nova.exception.PortBindingFailed: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. [ 740.372076] env[61594]: ERROR nova.compute.manager [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] [ 740.373385] env[61594]: DEBUG nova.compute.utils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 740.373385] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Build of instance 59555974-e1a8-467a-872e-5d0cd4daffdb was re-scheduled: Binding failed for port fc83359a-f7b2-4614-bff2-cd718a34a194, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 740.373385] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 740.373571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquiring lock "refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 740.373709] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Acquired lock "refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 740.374066] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 740.374419] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 254c31b4406c4cadbdeeb0a0b00a3201 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.384012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 254c31b4406c4cadbdeeb0a0b00a3201 [ 740.589454] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 
tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 740.815877] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "5efb5557-deab-4eec-ac5a-7aabb4477c61" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.818024] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "5efb5557-deab-4eec-ac5a-7aabb4477c61" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.818024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 859ca2cc7bbf4f389d4c23be38f9dded in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.837912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 859ca2cc7bbf4f389d4c23be38f9dded [ 740.838488] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 740.840535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 20cf45481d8a465085bf925403900aea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.887253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20cf45481d8a465085bf925403900aea [ 740.918999] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.919256] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 740.921209] env[61594]: INFO nova.compute.claims [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.923307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 10acbae6173b4126a439474d7f559312 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.966377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10acbae6173b4126a439474d7f559312 [ 740.968719] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7c5f415347174bd7b99d4c24dfd2d1a3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 740.981650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c5f415347174bd7b99d4c24dfd2d1a3 [ 741.092512] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8159d236-195a-4623-8f9a-b9e578801482 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.100917] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f87cdc1-3e62-4f98-baf2-4f649785ad61 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.134946] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c281fe2e-a8ff-4235-9be0-cdb4f9e250e6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.143298] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-89e10f38-8802-4ed7-8d47-3bddf056ed04 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.157176] env[61594]: DEBUG nova.compute.provider_tree [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 741.157757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg ffc3eab573d64bce8042dd304c2b4643 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.167584] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffc3eab573d64bce8042dd304c2b4643 [ 741.168829] env[61594]: DEBUG nova.scheduler.client.report [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 741.171634] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 685b6dac94474ec3a0332803d1a6f486 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.184981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 685b6dac94474ec3a0332803d1a6f486 [ 741.185878] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.267s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.186626] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 741.188372] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 5b6763860cce44db959ec16521342186 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.220559] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b6763860cce44db959ec16521342186 [ 741.223772] env[61594]: DEBUG nova.compute.utils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 741.224102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 33a915acb9134c4b9643ec46c8f19599 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.225175] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 741.225339] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 741.245399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33a915acb9134c4b9643ec46c8f19599 [ 741.245772] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 741.247697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b015754f46cc48eb87026a84ef79360a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.291134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b015754f46cc48eb87026a84ef79360a [ 741.295449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 623acc2f154e44b99a03e2b235621aaa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.354532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 623acc2f154e44b99a03e2b235621aaa [ 741.356148] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 741.378562] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.379139] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg b390812776ae43bfb592a505fb38482d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.391307] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.391307] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.391307] env[61594]: DEBUG nova.virt.hardware [None 
req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.391524] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.391618] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.391772] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.392056] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.393584] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.393730] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.393826] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.394062] env[61594]: DEBUG nova.virt.hardware [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.394839] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b390812776ae43bfb592a505fb38482d [ 741.395866] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfa68fa-9a6f-4ea8-8608-4ab99ea140a5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.399716] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 
tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Releasing lock "refresh_cache-59555974-e1a8-467a-872e-5d0cd4daffdb" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 741.399973] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 741.400143] env[61594]: DEBUG nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 741.400322] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 741.411203] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c1c9be-80a2-4bf2-ba77-be3091e67b8b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.500842] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 741.502980] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 35a8e709347a45dca1196d323220ec79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.512225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35a8e709347a45dca1196d323220ec79 [ 741.512883] env[61594]: DEBUG nova.network.neutron [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.513379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg 7ae2d44776c3445b92282e1d0e835c66 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.529941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ae2d44776c3445b92282e1d0e835c66 [ 741.531158] env[61594]: INFO nova.compute.manager [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] [instance: 59555974-e1a8-467a-872e-5d0cd4daffdb] Took 0.13 seconds to deallocate network for instance. [ 741.532826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg d16663b2c1d24375939eaf6a59982483 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.588917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d16663b2c1d24375939eaf6a59982483 [ 741.591599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg a8f6a8dfedc44429a92b81fdde204e5f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.639885] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8f6a8dfedc44429a92b81fdde204e5f [ 741.686677] env[61594]: INFO nova.scheduler.client.report [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Deleted allocations for instance 59555974-e1a8-467a-872e-5d0cd4daffdb [ 741.693810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Expecting reply to msg a5ffa93fa0c547b9a5733f1ec948a79d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 741.706188] env[61594]: DEBUG nova.policy [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee5a21ff43314c1a857f6958056173f9', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': 'afc5e909ec5c4dd983ece5aa3236910f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 741.717221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5ffa93fa0c547b9a5733f1ec948a79d [ 741.717953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b9a97487-71d8-415d-99f1-92cac8219fa5 tempest-ServersNegativeTestJSON-52319824 tempest-ServersNegativeTestJSON-52319824-project-member] Lock "59555974-e1a8-467a-872e-5d0cd4daffdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.048s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 742.581549] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Successfully created port: d4b27b9b-cbb6-4137-9f20-14a7edbbb116 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.285217] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquiring lock "807b279c-5934-4b4b-977d-c02a8dcbbdb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.285495] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "807b279c-5934-4b4b-977d-c02a8dcbbdb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.286306] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 715b460f238847238bb8ed0953243065 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.292177] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "51ae3e83-1cf3-4f56-b48d-b436ae84d706" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.292177] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "51ae3e83-1cf3-4f56-b48d-b436ae84d706" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.292177] 
env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d0138e7c9fed44ba93e01650fdf90650 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.300956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 715b460f238847238bb8ed0953243065 [ 744.301602] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 744.303300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 8c3da5daebd248d5b1e0610d5b4a6a2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.309699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0138e7c9fed44ba93e01650fdf90650 [ 744.310218] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 744.311872] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 2e001ed0642d40c08e34e71ed04ff3cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.380042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c3da5daebd248d5b1e0610d5b4a6a2a [ 744.385089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e001ed0642d40c08e34e71ed04ff3cd [ 744.415990] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.415990] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.417583] env[61594]: INFO nova.compute.claims [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.419934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg ffef1cb94da947758671162ee34396a0 in 
queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.422142] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.445454] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Successfully created port: 5084e7ba-5652-4703-8329-ca8f37da9c52 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.470602] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffef1cb94da947758671162ee34396a0 [ 744.473888] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 5e0c93ecd0c04f50a1233b223fb01521 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.486660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e0c93ecd0c04f50a1233b223fb01521 [ 744.611439] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4787e24-fe8b-48aa-81d8-a05bc1ebc48e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.626877] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b951e6-00bf-4ad6-8db8-b212a2457969 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.658680] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8911bb7d-aa6b-477e-9620-06b6f554d308 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.666309] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af40d1a-1af0-45d9-892c-0aa2a92b64a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.681390] env[61594]: DEBUG nova.compute.provider_tree [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 744.681899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 8662711623fd4adc8e2e308fa3beff43 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.692256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8662711623fd4adc8e2e308fa3beff43 [ 744.693235] env[61594]: DEBUG nova.scheduler.client.report [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Inventory has not 
changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 744.695799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg b3217c73072046e28ffbe4db9efd045b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.710886] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3217c73072046e28ffbe4db9efd045b [ 744.711875] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 744.712382] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 744.714060] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 0c74b3f2d8354701a0ea7eb797d554de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.715050] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.293s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 744.716539] env[61594]: INFO nova.compute.claims [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.718112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 61c8ed3d59e84b24a88bbdb40d2e2594 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.774739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c74b3f2d8354701a0ea7eb797d554de [ 744.776555] env[61594]: DEBUG nova.compute.utils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 
744.777699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 5d88b234d20f47bc857e9dfbc413b2a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.778429] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 744.781612] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 744.781612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61c8ed3d59e84b24a88bbdb40d2e2594 [ 744.784986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 54e7c1617a684ee78d5489c07b1800ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.795066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54e7c1617a684ee78d5489c07b1800ce [ 744.799341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d88b234d20f47bc857e9dfbc413b2a0 [ 744.800080] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 744.802375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 78fedc148ccb429da2fbeacdc9b8342f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.846146] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78fedc148ccb429da2fbeacdc9b8342f [ 744.849159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 694cc158915b4799af1520dca9fb629a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 744.892441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 694cc158915b4799af1520dca9fb629a [ 744.893733] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 744.939473] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 744.939711] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 744.939872] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 744.940156] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 744.940325] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 744.940475] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 744.940680] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 744.940839] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 744.941123] env[61594]: DEBUG nova.virt.hardware [None 
req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 744.941335] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 744.941515] env[61594]: DEBUG nova.virt.hardware [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 744.942422] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d049514-1e21-420d-b54a-2d288d467987 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.946375] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f116306b-d48f-4250-8987-b0673357dc7e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.959304] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ce3eac-d773-4b62-8c8d-b8ea7ca1027f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.964438] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b107cad8-6f64-4ce1-abd6-9561c26bec7a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.003220] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a129c9d2-b1dc-4279-86e4-a382585a2262 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.011126] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba75a40-9652-41dc-9490-0102c32c6cf7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.024648] env[61594]: DEBUG nova.compute.provider_tree [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.025201] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 85ac6a62af0d44d0a2a8ac9feb30f994 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.036175] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85ac6a62af0d44d0a2a8ac9feb30f994 [ 745.037407] env[61594]: DEBUG nova.scheduler.client.report [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 745.044854] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 388adb766dba43f39b6a38cdc17a6a75 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.056801] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 388adb766dba43f39b6a38cdc17a6a75 [ 745.057710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.343s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.058298] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 745.060425] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 2d885bc6a88647ffafdca190e3f44923 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.097504] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d885bc6a88647ffafdca190e3f44923 [ 745.098865] env[61594]: DEBUG nova.compute.utils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 745.099510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4c66eb128ed145d5b803e6e22de1b428 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.104020] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 745.104020] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 745.109455] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c66eb128ed145d5b803e6e22de1b428 [ 745.110071] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 745.111785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 9d1609732bfe44ddbe9be26892d96a57 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.146932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d1609732bfe44ddbe9be26892d96a57 [ 745.149596] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 7e130f7299a2408ab0e2a2c9cff632b9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.185015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e130f7299a2408ab0e2a2c9cff632b9 [ 745.186361] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 745.204715] env[61594]: DEBUG nova.policy [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b20e19f3ac704956aae925f564d4b286', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4455e9f03bab49eba3d941e3565381cc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 745.220885] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.222018] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.222018] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.222018] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.222018] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.222367] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.222890] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.223240] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.223557] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.225119] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.225119] env[61594]: DEBUG nova.virt.hardware [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.225398] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce3e5028-1571-493a-8117-193f87b18f73 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.239707] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c0daf8-8184-46e4-973e-f4013f1ab75b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.637095] env[61594]: DEBUG nova.policy [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 745.789189] env[61594]: ERROR nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. 
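The two ERROR blocks that follow unwind the same failure twice, once as a bare traceback and once prefixed with the instance id: allocate_for_instance() calls _update_ports_for_instance(), which calls _update_port() and finally _ensure_no_port_binding_failure(), and that guard raises PortBindingFailed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981. A minimal sketch of that final guard, reconstructed only from the frames logged here (the actual body in nova/network/neutron.py may differ; the constant name and value are assumptions):

VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant value


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding through the port's binding:vif_type
    # field; when it comes back as 'binding_failed' the VIF can never be
    # plugged, so the build is aborted instead of spawning a broken guest.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

Feeding it a port dict such as {'id': 'b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981', 'binding:vif_type': 'binding_failed'} reproduces the exception text seen in the surrounding records.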
[ 745.789189] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 745.789189] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.789189] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.789189] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.789189] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.789189] env[61594]: ERROR nova.compute.manager raise self.value [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.789189] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 745.789189] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.789189] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 745.789683] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.789683] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 745.789683] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. 
[ 745.789683] env[61594]: ERROR nova.compute.manager [ 745.789683] env[61594]: Traceback (most recent call last): [ 745.789683] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 745.789683] env[61594]: listener.cb(fileno) [ 745.789683] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 745.789683] env[61594]: result = function(*args, **kwargs) [ 745.789683] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 745.789683] env[61594]: return func(*args, **kwargs) [ 745.789683] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 745.789683] env[61594]: raise e [ 745.789683] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 745.789683] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 745.789683] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.789683] env[61594]: created_port_ids = self._update_ports_for_instance( [ 745.789683] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.789683] env[61594]: with excutils.save_and_reraise_exception(): [ 745.789683] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.789683] env[61594]: self.force_reraise() [ 745.789683] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.789683] env[61594]: raise self.value [ 745.789683] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.789683] env[61594]: updated_port = self._update_port( [ 745.789683] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.789683] env[61594]: _ensure_no_port_binding_failure(port) [ 745.789683] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.789683] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 745.790525] env[61594]: nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. [ 745.790525] env[61594]: Removing descriptor: 23 [ 745.790525] env[61594]: ERROR nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. 
[ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Traceback (most recent call last): [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] yield resources [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.driver.spawn(context, instance, image_meta, [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self._vmops.spawn(context, instance, image_meta, injected_files, [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 745.790525] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] vm_ref = self.build_virtual_machine(instance, [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] vif_infos = vmwarevif.get_vif_info(self._session, [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] for vif in network_info: [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self._sync_wrapper(fn, *args, **kwargs) [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.wait() [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self[:] = self._gt.wait() [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self._exit_event.wait() [ 745.790873] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 745.791270] env[61594]: ERROR 
nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] result = hub.switch() [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self.greenlet.switch() [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] result = function(*args, **kwargs) [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return func(*args, **kwargs) [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise e [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] nwinfo = self.network_api.allocate_for_instance( [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 745.791270] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] created_port_ids = self._update_ports_for_instance( [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] with excutils.save_and_reraise_exception(): [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.force_reraise() [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise self.value [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] updated_port = self._update_port( [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 745.791632] 
env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] _ensure_no_port_binding_failure(port) [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 745.791632] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise exception.PortBindingFailed(port_id=port['id']) [ 745.791970] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. [ 745.791970] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] [ 745.791970] env[61594]: INFO nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Terminating instance [ 745.801018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 745.801018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquired lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.801018] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 745.801018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg d9c6a92b5ce14f148704cd0154624d54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 745.813624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9c6a92b5ce14f148704cd0154624d54 [ 745.890445] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 745.929592] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.628226] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.628776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg f748f053b54c409bbd2c2a2a572a0821 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.641348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f748f053b54c409bbd2c2a2a572a0821 [ 746.642040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Releasing lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 746.642649] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 746.643651] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 746.643651] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-59471d28-84dc-41a0-bddc-429cb73dd5d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.653786] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8910db0c-81cf-49e8-b5e3-8d00800d4433 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.680943] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 578faa1c-9edd-4ce3-8a5d-add49367d390 could not be found. 
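The destroy path here is deliberately tolerant of the failed spawn: SearchIndex.FindAllByUuid finds no backing VM, the driver logs the InstanceNotFound warning above, and the records that follow show it marking the instance destroyed and moving straight on to network deallocation. A rough sketch of that not-found-tolerant pattern, for illustration only (the session helpers are hypothetical, not the vmwareapi driver API):

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Raised when no backing VM exists for the instance UUID."""


def destroy_instance(session, instance_uuid):
    """Best-effort destroy: a VM that was never created is treated as gone."""
    try:
        vm_ref = session.find_vm_by_uuid(instance_uuid)  # hypothetical helper
        if vm_ref is None:
            raise InstanceNotFound(instance_uuid)
        session.destroy_vm(vm_ref)                       # hypothetical helper
    except InstanceNotFound:
        # Mirrors the WARNING above: nothing exists on the backend, so skip
        # the hypervisor call and let network/claim cleanup proceed anyway.
        LOG.warning("Instance does not exist on backend: %s", instance_uuid)
    LOG.debug("Instance destroyed")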
[ 746.682442] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 746.682442] env[61594]: INFO nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Took 0.04 seconds to destroy the instance on the hypervisor. [ 746.682442] env[61594]: DEBUG oslo.service.loopingcall [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 746.682660] env[61594]: DEBUG nova.compute.manager [-] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 746.682660] env[61594]: DEBUG nova.network.neutron [-] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 746.792455] env[61594]: DEBUG nova.network.neutron [-] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.792455] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 989a2b691276465cb091395b510ddc8a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.801996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 989a2b691276465cb091395b510ddc8a [ 746.801996] env[61594]: DEBUG nova.network.neutron [-] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.801996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2e8acc8eeaef40598a24d21af05e86c8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.815733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e8acc8eeaef40598a24d21af05e86c8 [ 746.815733] env[61594]: INFO nova.compute.manager [-] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Took 0.13 seconds to deallocate network for instance. 
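Throughout this window the report client keeps confirming the same inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be (the records at 744.693 and 745.037 above, and again at 747.083 below). Under the placement model those fields determine how much of each resource class is schedulable; a small sketch of that arithmetic, assuming the usual (total - reserved) * allocation_ratio capacity formula, with max_unit then capping any single allocation:

# Effective capacity per resource class from the inventory dict logged above.
# Assumes the standard placement capacity formula; values copied from the log.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def capacity(inv):
    """Schedulable amount: (total - reserved) scaled by the allocation ratio."""
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])


for rc, inv in inventory.items():
    print(rc, capacity(inv))
# -> VCPU 192, MEMORY_MB 196078, DISK_GB 400, which is why the 1 vCPU /
#    128 MB / 1 GB m1.nano claims above succeed immediately on node domain-c8.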
[ 746.819470] env[61594]: DEBUG nova.compute.claims [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 746.819470] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.819470] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.819470] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 88ce2f8c80244b44a57881b82b90ce58 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.874053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88ce2f8c80244b44a57881b82b90ce58 [ 746.940684] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquiring lock "681fb733-cd37-4f73-a487-e4856206907f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.940684] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "681fb733-cd37-4f73-a487-e4856206907f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.940684] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 227c03a042fd46a28c6d31af4dec01c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.952223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 227c03a042fd46a28c6d31af4dec01c6 [ 746.955016] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 746.955247] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg a5cbfe6c9c884744bed105081b3bd42c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 746.997561] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5cbfe6c9c884744bed105081b3bd42c [ 747.005258] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68cde9a-cfa4-4f1b-99af-aaad5b96430d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.013949] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 747.015065] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d274f9-822d-4590-89c0-488b6a99c92d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.050087] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebfe64c7-0563-4e17-b91a-58301f15a719 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.058523] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2ef8ce-0d5a-4f6f-ae59-b244b1c81113 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.073436] env[61594]: DEBUG nova.compute.provider_tree [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.074125] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 367be4dad8ab4c6ca16e78a0250cc7c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.082715] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 367be4dad8ab4c6ca16e78a0250cc7c5 [ 747.083651] env[61594]: DEBUG nova.scheduler.client.report [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 747.086086] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg ab714a9b83ac4745be02e73a779bb411 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.100869] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab714a9b83ac4745be02e73a779bb411 [ 747.102511] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.284s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.102511] env[61594]: ERROR nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Traceback (most recent call last): [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.driver.spawn(context, instance, image_meta, [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.102511] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] vm_ref = self.build_virtual_machine(instance, [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] for vif in network_info: [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self._sync_wrapper(fn, *args, **kwargs) [ 747.102796] env[61594]: ERROR 
nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.wait() [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self[:] = self._gt.wait() [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self._exit_event.wait() [ 747.102796] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] result = hub.switch() [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return self.greenlet.switch() [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] result = function(*args, **kwargs) [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] return func(*args, **kwargs) [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise e [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] nwinfo = self.network_api.allocate_for_instance( [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 747.103115] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] created_port_ids = self._update_ports_for_instance( [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] with excutils.save_and_reraise_exception(): [ 747.103425] env[61594]: ERROR nova.compute.manager 
[instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] self.force_reraise() [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise self.value [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] updated_port = self._update_port( [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] _ensure_no_port_binding_failure(port) [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.103425] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] raise exception.PortBindingFailed(port_id=port['id']) [ 747.103698] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] nova.exception.PortBindingFailed: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. [ 747.103698] env[61594]: ERROR nova.compute.manager [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] [ 747.103698] env[61594]: DEBUG nova.compute.utils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. 
{{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 747.104323] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.090s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 747.105768] env[61594]: INFO nova.compute.claims [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.107333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg a4760336453d497894820dea42e924f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.113482] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Build of instance 578faa1c-9edd-4ce3-8a5d-add49367d390 was re-scheduled: Binding failed for port b5fdbb40-b9cc-4ce3-b78b-ae060cf5a981, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 747.113938] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 747.114178] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.114328] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquired lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.114519] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.114853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 50a211305fa840a9a485ddb75221ed6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.121285] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50a211305fa840a9a485ddb75221ed6e [ 747.125912] env[61594]: ERROR nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. [ 747.125912] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 747.125912] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 747.125912] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 747.125912] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.125912] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.125912] env[61594]: ERROR nova.compute.manager raise self.value [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 747.125912] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 747.125912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.125912] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 747.126334] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.126334] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 747.126334] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. 
[ 747.126334] env[61594]: ERROR nova.compute.manager [ 747.126495] env[61594]: Traceback (most recent call last): [ 747.126549] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 747.126549] env[61594]: listener.cb(fileno) [ 747.126549] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 747.126549] env[61594]: result = function(*args, **kwargs) [ 747.126549] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 747.126549] env[61594]: return func(*args, **kwargs) [ 747.126549] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 747.126549] env[61594]: raise e [ 747.126765] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 747.126765] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 747.126765] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 747.126765] env[61594]: created_port_ids = self._update_ports_for_instance( [ 747.126765] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 747.126765] env[61594]: with excutils.save_and_reraise_exception(): [ 747.126765] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.126765] env[61594]: self.force_reraise() [ 747.126765] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.126765] env[61594]: raise self.value [ 747.126765] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 747.126765] env[61594]: updated_port = self._update_port( [ 747.126765] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.126765] env[61594]: _ensure_no_port_binding_failure(port) [ 747.126765] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.126765] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 747.126765] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. [ 747.126765] env[61594]: Removing descriptor: 20 [ 747.128019] env[61594]: ERROR nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. 
[ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Traceback (most recent call last): [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] yield resources [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.driver.spawn(context, instance, image_meta, [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self._vmops.spawn(context, instance, image_meta, injected_files, [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] vm_ref = self.build_virtual_machine(instance, [ 747.128019] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] vif_infos = vmwarevif.get_vif_info(self._session, [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] for vif in network_info: [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self._sync_wrapper(fn, *args, **kwargs) [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.wait() [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self[:] = self._gt.wait() [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self._exit_event.wait() [ 747.128327] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 747.128327] env[61594]: ERROR 
nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] result = hub.switch() [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self.greenlet.switch() [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] result = function(*args, **kwargs) [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return func(*args, **kwargs) [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise e [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] nwinfo = self.network_api.allocate_for_instance( [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] created_port_ids = self._update_ports_for_instance( [ 747.128663] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] with excutils.save_and_reraise_exception(): [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.force_reraise() [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise self.value [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] updated_port = self._update_port( [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 747.128988] 
env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] _ensure_no_port_binding_failure(port) [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise exception.PortBindingFailed(port_id=port['id']) [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. [ 747.128988] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] [ 747.129329] env[61594]: INFO nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Terminating instance [ 747.132868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 747.132868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.132868] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.132868] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg d00d713083f64fc882f659dcead70ce4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.144273] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d00d713083f64fc882f659dcead70ce4 [ 747.156909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4760336453d497894820dea42e924f9 [ 747.159103] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 352084c261dc4d1d8f082158044f3d7b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.170578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 352084c261dc4d1d8f082158044f3d7b [ 747.220857] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.233054] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.287549] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48745790-a182-4b00-a0ba-88835fc2afce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.295231] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5e9e57-ddc2-41a2-9713-91963ed0a5cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.325825] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fe3a770-1ef0-468f-985a-607ab8136def {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.334628] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304c9633-3e6b-4e1a-8045-7ae924f957f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.352578] env[61594]: DEBUG nova.compute.provider_tree [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.353112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 49d0998c795346ae91903a0e0dae73f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.364336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49d0998c795346ae91903a0e0dae73f4 [ 747.365392] env[61594]: DEBUG nova.scheduler.client.report [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 747.369195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg f67b31cc916c4a6aa5a08477bb37e6ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.384565] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
f67b31cc916c4a6aa5a08477bb37e6ff [ 747.385453] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.281s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 747.385964] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 747.391021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 73e4517ef22844ecbb18958b76b0a83d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.438546] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73e4517ef22844ecbb18958b76b0a83d [ 747.440133] env[61594]: DEBUG nova.compute.utils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 747.440790] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg f29e354ba0d84b7eb1907b2761e0a4f2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.441845] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 747.442055] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 747.456140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f29e354ba0d84b7eb1907b2761e0a4f2 [ 747.456760] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 747.458520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 197aa6b4bb504e8f928d9877df666104 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.491739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 197aa6b4bb504e8f928d9877df666104 [ 747.495388] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg e8bfea675ac04ebf93e0d82c4c8230e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.543459] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.543927] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.543927] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 747.544063] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 747.548204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e8bfea675ac04ebf93e0d82c4c8230e3 [ 747.549173] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 747.578781] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 747.579126] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 747.579364] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.579608] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 747.579813] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.580068] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 747.580367] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 747.580593] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 747.580822] env[61594]: DEBUG nova.virt.hardware [None 
req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 747.581085] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 747.581321] env[61594]: DEBUG nova.virt.hardware [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 747.582500] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3371580-968f-4885-833b-e855a29593be {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.585725] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Successfully created port: 010a6d62-e081-47f2-83de-049e88757d9d {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.593845] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5597652a-ea3c-428e-9c35-2eb4d9989c47 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.751780] env[61594]: DEBUG nova.policy [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1aeb3b98fa3b4e81845ed61d742f6258', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3c37c09a0a33411f8aa3240ac2adcff5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 747.922717] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.922717] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg f2d838cb6efa4a459f5399d01f7fa5eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 747.938266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2d838cb6efa4a459f5399d01f7fa5eb [ 747.939412] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Releasing lock "refresh_cache-578faa1c-9edd-4ce3-8a5d-add49367d390" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.939792] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 747.940153] env[61594]: DEBUG nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 747.940469] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 747.999915] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.999915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 3aa8d517b8134052b11f7f6cbba76789 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.007708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3aa8d517b8134052b11f7f6cbba76789 [ 748.008855] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.009751] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 748.010589] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 748.012605] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bce9b9e6-e503-4997-a34a-c99cd3474211 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.021434] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8cb44a-072f-42bf-a17c-82e563a10708 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.039494] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.040063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg e669d2007748415b9713b874431612f6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.048303] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e5d9ba62-f701-4c6a-8dbe-1bd401db3343 could not be found. [ 748.048458] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 748.048650] env[61594]: INFO nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Took 0.04 seconds to destroy the instance on the hypervisor. [ 748.048901] env[61594]: DEBUG oslo.service.loopingcall [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.049146] env[61594]: DEBUG nova.compute.manager [-] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 748.049245] env[61594]: DEBUG nova.network.neutron [-] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.055391] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e669d2007748415b9713b874431612f6 [ 748.055859] env[61594]: DEBUG nova.network.neutron [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.056339] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 7172a656c3cc4276874980b877a1c81d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.064271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7172a656c3cc4276874980b877a1c81d [ 748.064840] env[61594]: INFO nova.compute.manager [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: 578faa1c-9edd-4ce3-8a5d-add49367d390] Took 0.12 seconds to deallocate network for instance. [ 748.066648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 2d2c1b4655d345a3a9dbab09b5343239 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.115675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d2c1b4655d345a3a9dbab09b5343239 [ 748.119956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 6501205905fa45f6b26300ed31d382cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.148955] env[61594]: DEBUG nova.network.neutron [-] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.149542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 168f8e0398e9496f86796e95cdd775e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.163594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6501205905fa45f6b26300ed31d382cc [ 748.167668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 168f8e0398e9496f86796e95cdd775e1 [ 748.168388] env[61594]: DEBUG nova.network.neutron [-] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.168892] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a524c055a49840dcb2aee968bc59f668 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.181923] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a524c055a49840dcb2aee968bc59f668 [ 748.183430] env[61594]: INFO nova.compute.manager [-] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Took 0.13 seconds to deallocate network for instance. [ 748.185923] env[61594]: DEBUG nova.compute.claims [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 748.186399] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.186736] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.189782] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 446b80bdfee040dc887b4c6d9ac2d7de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.196018] env[61594]: INFO nova.scheduler.client.report [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Deleted allocations for instance 578faa1c-9edd-4ce3-8a5d-add49367d390 [ 748.199708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg a683762a52d742e18823549d7c16a5e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.202112] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Successfully created port: 
8d3bba46-c3ed-46dc-b832-da5d946bb2f5 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.232446] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 446b80bdfee040dc887b4c6d9ac2d7de [ 748.237328] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a683762a52d742e18823549d7c16a5e1 [ 748.237903] env[61594]: DEBUG oslo_concurrency.lockutils [None req-cf61da20-a151-401c-941c-0cce708baa2b tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "578faa1c-9edd-4ce3-8a5d-add49367d390" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.638s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.279230] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "45a40160-c224-4a8f-8e92-26d770d4ff4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.279537] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "45a40160-c224-4a8f-8e92-26d770d4ff4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.280677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg f6be329b2bef4a5c848900990c788425 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.293496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6be329b2bef4a5c848900990c788425 [ 748.293974] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 748.295619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a09f65a94d434e6692ae6cc0c66bdbdc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.335906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a09f65a94d434e6692ae6cc0c66bdbdc [ 748.356666] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.379654] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86634084-5dfb-497b-9121-e0ce77b23c8d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.388808] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35aeed3-2a85-4294-867f-6345258bf4e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.422739] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285c7ae8-e723-4d6a-807f-0bcc148b1836 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.430919] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1043180-39b7-4eda-9507-e11f3a2ccf1c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.451025] env[61594]: DEBUG nova.compute.provider_tree [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.451599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 9ec9a8fd6402446db1a53e50eaa88a8d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.466395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ec9a8fd6402446db1a53e50eaa88a8d [ 748.466395] env[61594]: DEBUG nova.scheduler.client.report [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 748.466963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 5f5780aed33144449176985d045758ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.482096] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f5780aed33144449176985d045758ce [ 748.483151] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.296s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.483625] env[61594]: ERROR nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Traceback (most recent call last): [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.driver.spawn(context, instance, image_meta, [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] vm_ref = self.build_virtual_machine(instance, [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.483625] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] for vif in network_info: [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self._sync_wrapper(fn, *args, **kwargs) [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File 
"/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.wait() [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self[:] = self._gt.wait() [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self._exit_event.wait() [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] result = hub.switch() [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 748.483956] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return self.greenlet.switch() [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] result = function(*args, **kwargs) [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] return func(*args, **kwargs) [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise e [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] nwinfo = self.network_api.allocate_for_instance( [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] created_port_ids = self._update_ports_for_instance( [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] with excutils.save_and_reraise_exception(): [ 748.484459] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] self.force_reraise() [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise self.value [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] updated_port = self._update_port( [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] _ensure_no_port_binding_failure(port) [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] raise exception.PortBindingFailed(port_id=port['id']) [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] nova.exception.PortBindingFailed: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. [ 748.484794] env[61594]: ERROR nova.compute.manager [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] [ 748.485113] env[61594]: DEBUG nova.compute.utils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. 
{{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.488073] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.129s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.488938] env[61594]: INFO nova.compute.claims [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.491094] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 457e0db9c0864f25b5a3d9cb281e6835 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.494676] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Build of instance e5d9ba62-f701-4c6a-8dbe-1bd401db3343 was re-scheduled: Binding failed for port 8cd305c7-8efe-43f6-b7a1-97c329a3b663, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 748.495473] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 748.495831] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.497561] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.497561] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.497695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 532314955cc047da8b8d6a0b792b4eb5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.506122] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 532314955cc047da8b8d6a0b792b4eb5 [ 748.541199] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 457e0db9c0864f25b5a3d9cb281e6835 [ 748.543062] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 99f0976ffe78482288c9305f3663bb06 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.544206] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.545609] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 748.545609] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 748.545609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 4d567f50281e4626851c47bf5d438adb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.557023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99f0976ffe78482288c9305f3663bb06 [ 748.568326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d567f50281e4626851c47bf5d438adb [ 748.570486] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.570653] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.570791] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.570928] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.571131] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.571284] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.571412] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 748.571545] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 748.572023] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.572231] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 748.572584] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 4f643880b19b45e68b7118892a80d8ee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.583803] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f643880b19b45e68b7118892a80d8ee [ 748.584766] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.702802] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2fcbb7-4b0e-413d-9631-e8664f832bbb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.712821] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bb8cfe5-cfb2-434b-853d-a0e1a7d2a017 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.765077] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afc48ac-9c8d-4fab-8e83-21d775af6a8c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.765077] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc08516a-29ba-45e8-a3a3-ee36ae5e2044 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.775630] env[61594]: DEBUG nova.compute.provider_tree [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.776152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 
tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg b52fbadcb9d142cfb3d6542f2db764ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.785867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b52fbadcb9d142cfb3d6542f2db764ce [ 748.786831] env[61594]: DEBUG nova.scheduler.client.report [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 748.789233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 570ac6729bc442f0bf8a9be7be580080 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.799855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 570ac6729bc442f0bf8a9be7be580080 [ 748.800908] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.802019] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 748.803912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8903e4cb760740f9824bdb60f534c48b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.804793] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.220s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.804980] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.807518] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 748.807518] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968afc30-660b-4e8c-99ea-88d083a7270f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.815341] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c960c68-1a75-471e-b6b2-a605e02f33db {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.820449] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.834175] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18fcbce-3f1c-4b03-b4d2-f3f48b13adf8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.842272] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e60083-0439-4dcb-91ea-6db824a5b828 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.847977] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8903e4cb760740f9824bdb60f534c48b [ 748.849252] env[61594]: DEBUG nova.compute.utils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 748.849844] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 306030c2e6ee40b095383ba52f4236c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.877194] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 748.877413] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 748.883247] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 306030c2e6ee40b095383ba52f4236c0 [ 748.883247] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181525MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 748.883247] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 748.883247] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.883247] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 9b52ae6da4a04e3dbb641f1441363759 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.883247] env[61594]: 
DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 748.884889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 0818fecfe8eb4ee1b5c2a5f4102f4b58 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.923333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b52ae6da4a04e3dbb641f1441363759 [ 748.924233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0818fecfe8eb4ee1b5c2a5f4102f4b58 [ 748.928068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 1fefac9e467e4881b508bc923d3fc1ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.933321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d44ad6b275a64a3883d5e53b212d4e98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.943788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fefac9e467e4881b508bc923d3fc1ff [ 748.965736] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ef76aeb5c50c4d2d8b64e0060c48fad2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 748.972613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d44ad6b275a64a3883d5e53b212d4e98 [ 748.973639] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 748.997693] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef76aeb5c50c4d2d8b64e0060c48fad2 [ 748.998642] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance e5d9ba62-f701-4c6a-8dbe-1bd401db3343 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 748.998793] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance cabdd4fe-9515-45f0-b596-862986f5733e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.998920] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance a2429a5f-fe61-46b9-a71d-a6ddd62a6e08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999054] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 5efb5557-deab-4eec-ac5a-7aabb4477c61 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999180] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 51ae3e83-1cf3-4f56-b48d-b436ae84d706 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999298] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 807b279c-5934-4b4b-977d-c02a8dcbbdb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999471] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 681fb733-cd37-4f73-a487-e4856206907f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999521] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 45a40160-c224-4a8f-8e92-26d770d4ff4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 748.999709] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 748.999867] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 749.013727] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.013950] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.014150] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.014345] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.014494] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.014639] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.014932] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 
tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.015021] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.015453] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.015453] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.015610] env[61594]: DEBUG nova.virt.hardware [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.017402] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6679128-d0e7-42f3-b307-0636a706d48f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.027660] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90010da3-35fc-4789-a9c7-d2e700fd12ee {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.146304] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9a2c92-7b33-4e0e-abe5-812d09cdb60f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.154679] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1ba9a3-a0cd-40ed-8e91-a6a1a7ea4986 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.188595] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2766e16a-cc2c-42e4-9088-f13a3dd5b571 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.196413] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff75ae2-2737-44e1-b706-e7119dcffc27 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.210327] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.210806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg dadda96db31c4892a077bd1ae6abf76c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.219018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dadda96db31c4892a077bd1ae6abf76c [ 749.219948] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 749.222293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg d719e91b57674f448c8ad62b5ab63e5f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.252409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d719e91b57674f448c8ad62b5ab63e5f [ 749.252409] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 749.252409] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.371s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.305250] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquiring lock "d16dff71-2dab-469a-8cb1-40ed086c42d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.306527] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "d16dff71-2dab-469a-8cb1-40ed086c42d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.309523] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 3551b23df40b4a9d9ccedda17d942df9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.327949] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3551b23df40b4a9d9ccedda17d942df9 [ 
749.327949] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 749.327949] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 82daa2576d4d4b22961c56e5f4614c1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.368689] env[61594]: DEBUG nova.policy [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 749.373017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82daa2576d4d4b22961c56e5f4614c1d [ 749.399273] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 749.399273] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 749.399273] env[61594]: INFO nova.compute.claims [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.401705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 569734d95c1d4bd8bc09dad63c8a9237 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.459428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 569734d95c1d4bd8bc09dad63c8a9237 [ 749.462121] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 59003ea065c24393a864cfa28b9ed99e in 
queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.481034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59003ea065c24393a864cfa28b9ed99e [ 749.576211] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.576785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 63e7ed9415f340bfbfb5869a9825a8c8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.595027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63e7ed9415f340bfbfb5869a9825a8c8 [ 749.595809] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-e5d9ba62-f701-4c6a-8dbe-1bd401db3343" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.595944] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 749.596148] env[61594]: DEBUG nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 749.596329] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.654130] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6653f07-583d-4273-8021-15e8b559402f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.661795] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23136d93-e016-4002-91f7-1d92b38a2233 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.694206] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b50ce8-30bb-4842-8046-0a765aa07f45 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.702385] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.702975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 10d33495904241979c6826859d3e866f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.704737] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0185286-2200-494d-925e-56b099bf4751 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.710176] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10d33495904241979c6826859d3e866f [ 749.710659] env[61594]: DEBUG nova.network.neutron [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.711170] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 2dce6a6d64ea4620b934806a8222ab03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.719938] env[61594]: DEBUG nova.compute.provider_tree [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.720617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 7cbe02664a144155a49927ebb1b1b6a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.727602] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dce6a6d64ea4620b934806a8222ab03 [ 749.728200] env[61594]: INFO nova.compute.manager [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: e5d9ba62-f701-4c6a-8dbe-1bd401db3343] Took 0.13 seconds to deallocate network for instance. 
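Annotation: the repeated "Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be" entries in this log report the resource provider inventory that the resource tracker pushes to placement. A rough way to read those numbers: per resource class, the capacity the scheduler can allocate against is (total - reserved) * allocation_ratio, while max_unit caps what a single allocation may request (min_unit and step_size are left out here for brevity). The following is a minimal plain-Python sketch using the exact values logged here, not the placement service's own code:

    # Sketch only: derive effective capacity from the inventory dict exactly as logged.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 139,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, single allocation capped at max_unit={inv['max_unit']}")

    # VCPU: capacity=192, single allocation capped at max_unit=16
    # MEMORY_MB: capacity=196078, single allocation capped at max_unit=65530
    # DISK_GB: capacity=400, single allocation capped at max_unit=139

With the 4.0 VCPU overcommit the 48 host cores present as 192 allocatable VCPUs, which is why the instance_claim for d16dff71-2dab-469a-8cb1-40ed086c42d1 earlier in this log succeeds immediately.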
[ 749.729959] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 5241367252c34887bfa9d3bb48085844 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.738661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7cbe02664a144155a49927ebb1b1b6a7 [ 749.739572] env[61594]: DEBUG nova.scheduler.client.report [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 749.743018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 3f29f5d4340549f09b1caa33fe68cc93 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.758336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f29f5d4340549f09b1caa33fe68cc93 [ 749.759557] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.362s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.760067] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 749.763048] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg d8dcefd683a24e2c81a0227ac56e80e8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.801891] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5241367252c34887bfa9d3bb48085844 [ 749.804575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg e65384bd55f74fa98128eb9e530067e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.810359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8dcefd683a24e2c81a0227ac56e80e8 [ 749.811244] env[61594]: DEBUG nova.compute.utils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 749.812268] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg aa342db7cc654e17a53e8a5e72801b24 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.813092] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 749.813760] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 749.823858] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa342db7cc654e17a53e8a5e72801b24 [ 749.824277] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 749.825878] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg edecdcefe9354007adcca68d79ed4bac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.858975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e65384bd55f74fa98128eb9e530067e4 [ 749.862804] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edecdcefe9354007adcca68d79ed4bac [ 749.865089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 540c3064dca045cdb3ac40a7cb3ab8df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.900027] env[61594]: INFO nova.scheduler.client.report [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Deleted allocations for instance e5d9ba62-f701-4c6a-8dbe-1bd401db3343 [ 749.906067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg c10221fc97064bda964d24a2858dff37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 749.914692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 540c3064dca045cdb3ac40a7cb3ab8df [ 749.915156] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 749.930235] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c10221fc97064bda964d24a2858dff37 [ 749.930885] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f29ed5bc-91ef-4153-adc4-2a9a1cf4f3ca tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "e5d9ba62-f701-4c6a-8dbe-1bd401db3343" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.817s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 749.949658] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 749.950520] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 749.950520] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 749.950520] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 749.950520] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 749.950911] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 749.951545] env[61594]: DEBUG nova.virt.hardware [None 
req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 749.951545] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 749.951784] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 749.952030] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 749.952278] env[61594]: DEBUG nova.virt.hardware [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.957041] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92407596-c711-4493-b890-df352af56974 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.967525] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332d0375-d66b-4298-8fe3-24351eb862e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.223498] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.223866] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.239333] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Successfully created port: cf2a5b97-befe-4276-9dc3-bea4d29d0e6a {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.379163] env[61594]: DEBUG nova.policy [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Policy check for 
network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f35b9f152d5463797ee9ca40e847a12', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0f89ca420ba4732b58127285d726cb2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 750.734953] env[61594]: ERROR nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. [ 750.734953] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 750.734953] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 750.734953] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 750.734953] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.734953] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.734953] env[61594]: ERROR nova.compute.manager raise self.value [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 750.734953] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 750.734953] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.734953] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 750.736253] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.736253] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 750.736253] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. 
[ 750.736253] env[61594]: ERROR nova.compute.manager [ 750.736253] env[61594]: Traceback (most recent call last): [ 750.736253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 750.736253] env[61594]: listener.cb(fileno) [ 750.736253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 750.736253] env[61594]: result = function(*args, **kwargs) [ 750.736253] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 750.736253] env[61594]: return func(*args, **kwargs) [ 750.736253] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 750.736253] env[61594]: raise e [ 750.736253] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 750.736253] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 750.736253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 750.736253] env[61594]: created_port_ids = self._update_ports_for_instance( [ 750.736253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 750.736253] env[61594]: with excutils.save_and_reraise_exception(): [ 750.736253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.736253] env[61594]: self.force_reraise() [ 750.736253] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.736253] env[61594]: raise self.value [ 750.736253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 750.736253] env[61594]: updated_port = self._update_port( [ 750.736253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.736253] env[61594]: _ensure_no_port_binding_failure(port) [ 750.736253] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.736253] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 750.737546] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. [ 750.737546] env[61594]: Removing descriptor: 24 [ 750.737546] env[61594]: ERROR nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. 
[ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Traceback (most recent call last): [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] yield resources [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.driver.spawn(context, instance, image_meta, [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 750.737546] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] vm_ref = self.build_virtual_machine(instance, [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] vif_infos = vmwarevif.get_vif_info(self._session, [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] for vif in network_info: [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self._sync_wrapper(fn, *args, **kwargs) [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.wait() [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self[:] = self._gt.wait() [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self._exit_event.wait() [ 750.737872] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 750.738219] env[61594]: ERROR 
nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] result = hub.switch() [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self.greenlet.switch() [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] result = function(*args, **kwargs) [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return func(*args, **kwargs) [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise e [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] nwinfo = self.network_api.allocate_for_instance( [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 750.738219] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] created_port_ids = self._update_ports_for_instance( [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] with excutils.save_and_reraise_exception(): [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.force_reraise() [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise self.value [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] updated_port = self._update_port( [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 750.738596] 
env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] _ensure_no_port_binding_failure(port) [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 750.738596] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise exception.PortBindingFailed(port_id=port['id']) [ 750.738931] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. [ 750.738931] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] [ 750.738931] env[61594]: INFO nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Terminating instance [ 750.743266] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 750.743266] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquired lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.743266] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 750.743266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 1fae3bdb9de34a1ea42e7f5068b11c25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 750.753303] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fae3bdb9de34a1ea42e7f5068b11c25 [ 750.852307] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.252595] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.254926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 6116f8ae6d374e0abf088933d4bc770c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.265039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6116f8ae6d374e0abf088933d4bc770c [ 751.265835] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Releasing lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 751.266129] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 751.266331] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 751.267069] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-092529b1-eb08-4cb2-8d86-e72ef0c4ec84 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.278150] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f798f21-6e3c-43a6-96fe-41bb483f1120 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.306175] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cabdd4fe-9515-45f0-b596-862986f5733e could not be found. 
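Annotation: both PortBindingFailed tracebacks in this log (port 3499cf5d-3bf9-4c03-8f3d-25fd14830733 above, and port d4b27b9b-cbb6-4137-9f20-14a7edbbb116 just below) end in _ensure_no_port_binding_failure in nova/network/neutron.py. The gist of that check, as a minimal standalone sketch rather than the actual Nova source: Neutron returns the updated port with a binding:vif_type attribute, and a value of 'binding_failed' means no ML2 mechanism driver could bind the port on this host, which Nova converts into PortBindingFailed so the build is aborted.

    # Hedged sketch of the binding-failure check; names paraphrase what the
    # traceback shows and are defined locally, not imported from Nova itself.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron reports the ML2 binding outcome in binding:vif_type;
        # 'binding_failed' means the port could not be bound to this host.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port Neutron could not bind, as in the traceback above.
    try:
        ensure_no_port_binding_failure({
            'id': '3499cf5d-3bf9-4c03-8f3d-25fd14830733',
            'binding:vif_type': VIF_TYPE_BINDING_FAILED,
        })
    except PortBindingFailed as exc:
        print(exc)

Because the exception is raised inside _allocate_network_async, it surfaces again when the VMware driver iterates network_info during spawn; the compute manager then terminates the instance, deallocates the empty network info, and aborts the resource claim, which is the sequence of entries that follows each traceback here.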
[ 751.306175] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 751.306298] env[61594]: INFO nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 751.306560] env[61594]: DEBUG oslo.service.loopingcall [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 751.307906] env[61594]: DEBUG nova.compute.manager [-] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 751.308099] env[61594]: DEBUG nova.network.neutron [-] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 751.352168] env[61594]: ERROR nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. 
[ 751.352168] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 751.352168] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 751.352168] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 751.352168] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.352168] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.352168] env[61594]: ERROR nova.compute.manager raise self.value [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 751.352168] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 751.352168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.352168] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 751.352628] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.352628] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 751.352628] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. 
[ 751.352628] env[61594]: ERROR nova.compute.manager [ 751.352628] env[61594]: Traceback (most recent call last): [ 751.352628] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 751.352628] env[61594]: listener.cb(fileno) [ 751.352628] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 751.352628] env[61594]: result = function(*args, **kwargs) [ 751.352628] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 751.352628] env[61594]: return func(*args, **kwargs) [ 751.352628] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 751.352628] env[61594]: raise e [ 751.352628] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 751.352628] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 751.352628] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 751.352628] env[61594]: created_port_ids = self._update_ports_for_instance( [ 751.352628] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 751.352628] env[61594]: with excutils.save_and_reraise_exception(): [ 751.352628] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.352628] env[61594]: self.force_reraise() [ 751.352628] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.352628] env[61594]: raise self.value [ 751.352628] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 751.352628] env[61594]: updated_port = self._update_port( [ 751.352628] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.352628] env[61594]: _ensure_no_port_binding_failure(port) [ 751.352628] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.352628] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 751.353410] env[61594]: nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. [ 751.353410] env[61594]: Removing descriptor: 19 [ 751.353410] env[61594]: ERROR nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. 
[ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Traceback (most recent call last): [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] yield resources [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.driver.spawn(context, instance, image_meta, [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self._vmops.spawn(context, instance, image_meta, injected_files, [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 751.353410] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] vm_ref = self.build_virtual_machine(instance, [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] vif_infos = vmwarevif.get_vif_info(self._session, [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] for vif in network_info: [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self._sync_wrapper(fn, *args, **kwargs) [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.wait() [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self[:] = self._gt.wait() [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self._exit_event.wait() [ 751.353751] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 751.356263] env[61594]: ERROR 
nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] result = hub.switch() [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self.greenlet.switch() [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] result = function(*args, **kwargs) [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return func(*args, **kwargs) [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise e [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] nwinfo = self.network_api.allocate_for_instance( [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 751.356263] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] created_port_ids = self._update_ports_for_instance( [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] with excutils.save_and_reraise_exception(): [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.force_reraise() [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise self.value [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] updated_port = self._update_port( [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.356765] 
env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] _ensure_no_port_binding_failure(port) [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.356765] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise exception.PortBindingFailed(port_id=port['id']) [ 751.357188] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. [ 751.357188] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] [ 751.357188] env[61594]: INFO nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Terminating instance [ 751.358127] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.360414] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.360414] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 751.360414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg f6d7920777b7471893a04d9ec5dc501c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.371516] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6d7920777b7471893a04d9ec5dc501c [ 751.399108] env[61594]: DEBUG nova.network.neutron [-] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.399754] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6fb5861d3b924a4e82f8cd2686cc1413 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.417604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fb5861d3b924a4e82f8cd2686cc1413 [ 751.418101] env[61594]: DEBUG nova.network.neutron [-] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.418519] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3c781bb8cc264c17864b394a2056e747 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.432074] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c781bb8cc264c17864b394a2056e747 [ 751.432752] env[61594]: INFO nova.compute.manager [-] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Took 0.12 seconds to deallocate network for instance. [ 751.437220] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.439844] env[61594]: DEBUG nova.compute.claims [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 751.440875] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.441191] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.443204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg f0f4553e30e744798ff612768a2cbab1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.469925] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Successfully created port: e4f28593-3f76-493e-acd6-31f97463f969 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.520443] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0f4553e30e744798ff612768a2cbab1 [ 751.704149] env[61594]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589ff4e7-ed21-4504-acd3-5c9b29d8094b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.712310] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c7bb79-3a20-4642-a5ca-99414fc47d6e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.747156] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35af14ed-62e1-4faa-87f9-1887e0a933c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.757679] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee69300-ffe2-4ffd-968a-1006728f24a1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.775845] env[61594]: DEBUG nova.compute.provider_tree [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.776607] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 6e85f0f85cbb49628bd728471bf4046c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.791754] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e85f0f85cbb49628bd728471bf4046c [ 751.791754] env[61594]: DEBUG nova.scheduler.client.report [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 751.794307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 9bcf02d6050b4f72a0144d4948db9bd8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.810912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bcf02d6050b4f72a0144d4948db9bd8 [ 751.811693] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.370s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.813055] 
env[61594]: ERROR nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Traceback (most recent call last): [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.driver.spawn(context, instance, image_meta, [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] vm_ref = self.build_virtual_machine(instance, [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] vif_infos = vmwarevif.get_vif_info(self._session, [ 751.813055] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] for vif in network_info: [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self._sync_wrapper(fn, *args, **kwargs) [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.wait() [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self[:] = self._gt.wait() [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self._exit_event.wait() [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] result = hub.switch() [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 751.813537] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return self.greenlet.switch() [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] result = function(*args, **kwargs) [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] return func(*args, **kwargs) [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise e [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] nwinfo = self.network_api.allocate_for_instance( [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] created_port_ids = self._update_ports_for_instance( [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] with excutils.save_and_reraise_exception(): [ 751.814033] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] self.force_reraise() [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise self.value [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] updated_port = self._update_port( [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: 
cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] _ensure_no_port_binding_failure(port) [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] raise exception.PortBindingFailed(port_id=port['id']) [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] nova.exception.PortBindingFailed: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. [ 751.814547] env[61594]: ERROR nova.compute.manager [instance: cabdd4fe-9515-45f0-b596-862986f5733e] [ 751.814985] env[61594]: DEBUG nova.compute.utils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 751.816434] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Build of instance cabdd4fe-9515-45f0-b596-862986f5733e was re-scheduled: Binding failed for port 3499cf5d-3bf9-4c03-8f3d-25fd14830733, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 751.816434] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 751.816611] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquiring lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 751.816756] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Acquired lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.816919] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 751.817531] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 48ed87ca04a14336becfc2e7f7bc40cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 751.827344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48ed87ca04a14336becfc2e7f7bc40cc [ 751.908373] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.174018] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.174848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 8bea26c959514328aa1c7c9fcea36f8b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.183517] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bea26c959514328aa1c7c9fcea36f8b [ 752.184131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.184594] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 752.184707] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 752.185241] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1616ee38-3e02-4d24-af29-cc9184d3d5bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.194858] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9479a9cf-4e21-4584-8256-abdbd4657eb2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.219205] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a2429a5f-fe61-46b9-a71d-a6ddd62a6e08 could not be found. 
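The WARNING/destroy sequence above (SearchIndex.FindAllByUuid finding nothing, then "Instance does not exist on backend", then the instance reported destroyed) follows a tolerate-missing-VM pattern: a VM that is already gone on the hypervisor is logged and treated as destroyed so teardown can continue. A minimal sketch of that pattern, assuming hypothetical helper names rather than the real vmops internals:

import logging

LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Stand-in for nova.exception.InstanceNotFound."""


def destroy(session, instance_uuid, find_vm_ref):
    # find_vm_ref is assumed to raise InstanceNotFound when no VM matches
    # the instance UUID on the backend.
    try:
        vm_ref = find_vm_ref(session, instance_uuid)
    except InstanceNotFound as exc:
        LOG.warning("Instance does not exist on backend: %s", exc)
        return  # treat as already destroyed; caller goes on to deallocate network
    session.destroy_vm(vm_ref)  # hypothetical helper: unregister/delete the VM
    LOG.debug("Instance destroyed")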
[ 752.219489] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 752.219635] env[61594]: INFO nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Took 0.03 seconds to destroy the instance on the hypervisor. [ 752.220126] env[61594]: DEBUG oslo.service.loopingcall [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 752.220598] env[61594]: DEBUG nova.compute.manager [-] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 752.220817] env[61594]: DEBUG nova.network.neutron [-] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 752.254049] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.254578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg ae0fd929209d40a2a9c122cab3c7a13d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.263796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae0fd929209d40a2a9c122cab3c7a13d [ 752.264472] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Releasing lock "refresh_cache-cabdd4fe-9515-45f0-b596-862986f5733e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 752.264794] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 752.265314] env[61594]: DEBUG nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 752.266182] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 752.307811] env[61594]: DEBUG nova.network.neutron [-] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.308378] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7849617c246742d2853a07ae97072d8d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.317737] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7849617c246742d2853a07ae97072d8d [ 752.318347] env[61594]: DEBUG nova.network.neutron [-] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.318737] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8c454f2490243d48975c7d51158f400 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.327962] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8c454f2490243d48975c7d51158f400 [ 752.328544] env[61594]: INFO nova.compute.manager [-] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Took 0.11 seconds to deallocate network for instance. 
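Both PortBindingFailed tracebacks in this section terminate in the same check in nova/network/neutron.py: after a port update, a port whose binding came back failed is rejected and the exception message seen above is raised. A simplified, self-contained sketch of that kind of check, using a local exception class in place of nova.exception:

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron signals a failed binding by setting binding:vif_type to
    # "binding_failed" on the port; the check turns that into an exception.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


# Example of a port that failed to bind:
# ensure_no_port_binding_failure(
#     {"id": "d4b27b9b-cbb6-4137-9f20-14a7edbbb116",
#      "binding:vif_type": "binding_failed"})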
[ 752.330611] env[61594]: DEBUG nova.compute.claims [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 752.331144] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.331144] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.334030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg fd5ddcf755d248d8b382f6a7c2403b2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.339393] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.339907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 8b79d4bd26344643a5f6f9cfd71f5b23 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.347486] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b79d4bd26344643a5f6f9cfd71f5b23 [ 752.348138] env[61594]: DEBUG nova.network.neutron [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.348457] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 4cb08d7570ba498c91e981a36ac00042 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.373390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4cb08d7570ba498c91e981a36ac00042 [ 752.373390] env[61594]: INFO nova.compute.manager [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] [instance: cabdd4fe-9515-45f0-b596-862986f5733e] Took 0.11 seconds to deallocate network for instance. 
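The lockutils lines in the claim-abort path above follow a fixed pattern: the time spent waiting for the "compute_resources" lock and the time it is held are measured separately and logged on acquire and release. A rough sketch of that bookkeeping, assuming a simplified lock registry rather than the oslo.concurrency implementation:

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)
_LOCKS = {}


@contextmanager
def timed_lock(name, caller):
    # Sketch only: the real lockutils also supports external/file locks
    # and handles concurrent creation of the named lock.
    lock = _LOCKS.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    wait_start = time.monotonic()
    with lock:
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, time.monotonic() - wait_start)
        held_start = time.monotonic()
        try:
            yield
        finally:
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - held_start)


# Usage mirroring the claim-abort path above:
# with timed_lock("compute_resources",
#                 "nova.compute.resource_tracker.ResourceTracker"
#                 ".abort_instance_claim"):
#     ...abort the claim...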
[ 752.377560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg 788f3c09dfaa435a9acfe474168ec4c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.409016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd5ddcf755d248d8b382f6a7c2403b2f [ 752.453914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 788f3c09dfaa435a9acfe474168ec4c0 [ 752.456606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg d9f2fa2804db4c9d826bab430b26a3bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.489320] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Successfully created port: 8dfc2970-8f62-41a9-be95-d63bee3b3b33 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 752.500447] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9f2fa2804db4c9d826bab430b26a3bf [ 752.551308] env[61594]: INFO nova.scheduler.client.report [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Deleted allocations for instance cabdd4fe-9515-45f0-b596-862986f5733e [ 752.558493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Expecting reply to msg f988f93cf6e343d6b65f52c1eb89a4c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.582232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f988f93cf6e343d6b65f52c1eb89a4c6 [ 752.582815] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c9b99250-a19a-4128-b657-87181a2897a6 tempest-ListImageFiltersTestJSON-328745172 tempest-ListImageFiltersTestJSON-328745172-project-member] Lock "cabdd4fe-9515-45f0-b596-862986f5733e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.264s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.615779] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8164cb81-bb17-4cc4-ba9a-50cefb914742 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.623056] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4329ecf-82f2-43ba-a1f0-a83c009ed7f3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.660645] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8891743-c545-4301-b680-bc5c5519ccea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.665367] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670c03b1-237a-49ca-9d0e-dcc99a826658 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.673452] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquiring lock "c588118f-5b15-4262-8d21-c4fbc3088f1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.674095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "c588118f-5b15-4262-8d21-c4fbc3088f1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.674211] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 13164c62536d473d8490a58da98d33e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.683853] env[61594]: DEBUG nova.compute.provider_tree [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.688149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg a839f5dd8fef437d9458dd9248238f67 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.688149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13164c62536d473d8490a58da98d33e4 [ 752.688149] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 752.689108] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 4cf074ed404f4a86a31be3c6fe3c6281 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.692512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a839f5dd8fef437d9458dd9248238f67 [ 752.693223] env[61594]: DEBUG nova.scheduler.client.report [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 752.695387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 2543d829f2ef4216b73c219c78d23234 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.706323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2543d829f2ef4216b73c219c78d23234 [ 752.707188] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.376s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.707649] env[61594]: ERROR nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. 
[ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Traceback (most recent call last): [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.driver.spawn(context, instance, image_meta, [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] vm_ref = self.build_virtual_machine(instance, [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.707649] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] for vif in network_info: [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self._sync_wrapper(fn, *args, **kwargs) [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.wait() [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self[:] = self._gt.wait() [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self._exit_event.wait() [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] result = hub.switch() [ 752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
752.708166] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return self.greenlet.switch() [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] result = function(*args, **kwargs) [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] return func(*args, **kwargs) [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise e [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] nwinfo = self.network_api.allocate_for_instance( [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] created_port_ids = self._update_ports_for_instance( [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] with excutils.save_and_reraise_exception(): [ 752.708570] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] self.force_reraise() [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise self.value [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] updated_port = self._update_port( [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] _ensure_no_port_binding_failure(port) [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] raise exception.PortBindingFailed(port_id=port['id']) [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] nova.exception.PortBindingFailed: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. [ 752.708872] env[61594]: ERROR nova.compute.manager [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] [ 752.709347] env[61594]: DEBUG nova.compute.utils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 752.710094] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Build of instance a2429a5f-fe61-46b9-a71d-a6ddd62a6e08 was re-scheduled: Binding failed for port d4b27b9b-cbb6-4137-9f20-14a7edbbb116, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 752.710527] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 752.710749] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.710895] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.711070] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 752.711460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg ef31d5b073944379aef825dd59c19745 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.719707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef31d5b073944379aef825dd59c19745 [ 752.724105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received 
RPC response for msg 4cf074ed404f4a86a31be3c6fe3c6281 [ 752.744370] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 752.744590] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.746058] env[61594]: INFO nova.compute.claims [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.747838] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 2a868d478a584195813e0bdffad34751 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.781733] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.784164] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a868d478a584195813e0bdffad34751 [ 752.786083] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 08912457c81f4aa5a26ff9d7c485ca28 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.794327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08912457c81f4aa5a26ff9d7c485ca28 [ 752.923125] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bcd03d-0da1-4c2b-92dd-0a04d17f94e7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.931547] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990af573-28eb-426c-b171-7d3590cf381d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.961895] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96ee138-57c8-493e-aad3-8dd21f5214e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.968693] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f17ee049-e1b5-43df-b86e-e197130a2196 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.981562] env[61594]: DEBUG nova.compute.provider_tree [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 752.982070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 0fbc10d8f61842b7a9c46b6c224de273 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 752.989387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fbc10d8f61842b7a9c46b6c224de273 [ 752.990298] env[61594]: DEBUG nova.scheduler.client.report [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 752.992527] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 
tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg b2eeb54d74b74c8abd7c93a4c12db02f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.003533] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2eeb54d74b74c8abd7c93a4c12db02f [ 753.004247] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.260s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.004693] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 753.006365] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 5173e74888cd4391afec4c37157ce5cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.040916] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5173e74888cd4391afec4c37157ce5cc [ 753.042743] env[61594]: DEBUG nova.compute.utils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 753.043348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg f156c007da174dd7b81a20c54b95d924 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.044405] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 753.044586] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 753.054120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f156c007da174dd7b81a20c54b95d924 [ 753.054646] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 753.056210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 6076f51c8da64251a3f789127826e55b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.087317] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6076f51c8da64251a3f789127826e55b [ 753.090249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg b503c9d970af455b87d8a368cbc91d63 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.120638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b503c9d970af455b87d8a368cbc91d63 [ 753.121854] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 753.146301] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 753.146556] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 753.146713] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 753.146896] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 753.147051] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] 
Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 753.147208] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 753.147417] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 753.147576] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 753.147742] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 753.147917] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 753.148318] env[61594]: DEBUG nova.virt.hardware [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 753.149237] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d54f8a-059d-4b74-bc96-d5f759387905 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.157209] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb72e0f-be7f-411d-89e7-3a055f67b3d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.415401] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.415764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg dc6c9ebc73854f5e92ac32dd2bcd61eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.428616] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc6c9ebc73854f5e92ac32dd2bcd61eb [ 753.429292] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.429514] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 753.429698] env[61594]: DEBUG nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 753.429866] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 753.527663] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 753.528306] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg bf72cab4e0f342c68c3c20e4033324bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.541763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf72cab4e0f342c68c3c20e4033324bc [ 753.542403] env[61594]: DEBUG nova.network.neutron [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.542870] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg d538eb36756a4b7080b5634c0d1d19a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.550790] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d538eb36756a4b7080b5634c0d1d19a4 [ 753.551376] env[61594]: INFO nova.compute.manager [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: a2429a5f-fe61-46b9-a71d-a6ddd62a6e08] Took 0.12 seconds to deallocate network for instance. [ 753.553347] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg fe1b1806c3d84e51b6f418722a4247ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.601196] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe1b1806c3d84e51b6f418722a4247ff [ 753.604010] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 3408b0447fa344c793e11cad5c705d67 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.641709] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3408b0447fa344c793e11cad5c705d67 [ 753.645937] env[61594]: DEBUG nova.policy [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0697390bff094da5b8ec0fbb9ce233a8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79a9ed8a5ae24f64a929062d2664a4c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 753.670139] env[61594]: INFO nova.scheduler.client.report [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 
tempest-AttachInterfacesTestJSON-2048770672-project-member] Deleted allocations for instance a2429a5f-fe61-46b9-a71d-a6ddd62a6e08 [ 753.676058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 68776e3f6c7245c3b17b2e906f485c08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.695535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68776e3f6c7245c3b17b2e906f485c08 [ 753.696220] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a8e543f2-be34-4fba-842c-908ed995d413 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "a2429a5f-fe61-46b9-a71d-a6ddd62a6e08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.446s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 753.808021] env[61594]: ERROR nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 753.808021] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 753.808021] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.808021] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.808021] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.808021] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.808021] env[61594]: ERROR nova.compute.manager raise self.value [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.808021] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 753.808021] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.808021] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 753.808660] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.808660] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 753.808660] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 
5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 753.808660] env[61594]: ERROR nova.compute.manager [ 753.808660] env[61594]: Traceback (most recent call last): [ 753.808660] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 753.808660] env[61594]: listener.cb(fileno) [ 753.808660] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 753.808660] env[61594]: result = function(*args, **kwargs) [ 753.808660] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.808660] env[61594]: return func(*args, **kwargs) [ 753.808660] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 753.808660] env[61594]: raise e [ 753.808660] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 753.808660] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 753.808660] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.808660] env[61594]: created_port_ids = self._update_ports_for_instance( [ 753.808660] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.808660] env[61594]: with excutils.save_and_reraise_exception(): [ 753.808660] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.808660] env[61594]: self.force_reraise() [ 753.808660] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.808660] env[61594]: raise self.value [ 753.808660] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.808660] env[61594]: updated_port = self._update_port( [ 753.808660] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.808660] env[61594]: _ensure_no_port_binding_failure(port) [ 753.808660] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.808660] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 753.809372] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 753.809372] env[61594]: Removing descriptor: 22 [ 753.809372] env[61594]: ERROR nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. 
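The traceback above bottoms out in Nova's port-binding check: Neutron returned the port with a failed binding, so Nova raises PortBindingFailed and the instance build is aborted. A minimal, self-contained sketch of that check follows; the exception class and constant below are illustrative stand-ins, not Nova's actual nova.exception / network model code, and the port dict only reuses the port id from the entries above.

# Simplified sketch of the check behind the PortBindingFailed traceback above.
# The exception class and VIF_TYPE constant are illustrative stand-ins for
# Nova's own nova.exception.PortBindingFailed and network model constant.

VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            'Binding failed for port %s, please check neutron logs for '
            'more information.' % port_id)

def ensure_no_port_binding_failure(port):
    # Neutron reports the binding result in the port's 'binding:vif_type'
    # field; 'binding_failed' means no mechanism driver could bind the port,
    # so the compute manager cannot proceed with spawning the instance.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Example with the port id seen in the log entries above:
port = {'id': '5084e7ba-5652-4703-8329-ca8f37da9c52',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
ensure_no_port_binding_failure(port)  # raises PortBindingFailed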
[ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Traceback (most recent call last): [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] yield resources [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.driver.spawn(context, instance, image_meta, [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 753.809372] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] vm_ref = self.build_virtual_machine(instance, [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] vif_infos = vmwarevif.get_vif_info(self._session, [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] for vif in network_info: [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self._sync_wrapper(fn, *args, **kwargs) [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.wait() [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self[:] = self._gt.wait() [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self._exit_event.wait() [ 753.809710] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 753.810081] env[61594]: ERROR 
nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] result = hub.switch() [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self.greenlet.switch() [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] result = function(*args, **kwargs) [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return func(*args, **kwargs) [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise e [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] nwinfo = self.network_api.allocate_for_instance( [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 753.810081] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] created_port_ids = self._update_ports_for_instance( [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] with excutils.save_and_reraise_exception(): [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.force_reraise() [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise self.value [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] updated_port = self._update_port( [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 753.810448] 
env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] _ensure_no_port_binding_failure(port) [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 753.810448] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise exception.PortBindingFailed(port_id=port['id']) [ 753.810781] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 753.810781] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] [ 753.810781] env[61594]: INFO nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Terminating instance [ 753.814954] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 753.814954] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.814954] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 753.814954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b36be349a4734ed8abe94ef27cf7715e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 753.826999] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b36be349a4734ed8abe94ef27cf7715e [ 753.870182] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.109018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "de519927-4dbd-4e6b-ba0a-d02d01950749" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.109018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "de519927-4dbd-4e6b-ba0a-d02d01950749" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.109018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 8d89d39abbaa43e5a58a279b4fc9d6de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.115945] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d89d39abbaa43e5a58a279b4fc9d6de [ 754.116433] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 754.118682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg d1df34d0c7bf4bfeb537ab74a0fe8380 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.154889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d1df34d0c7bf4bfeb537ab74a0fe8380 [ 754.184676] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.184936] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.186785] env[61594]: INFO nova.compute.claims [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.188509] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 3794acd7792742f19755f2eb2ab33ce7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.230805] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3794acd7792742f19755f2eb2ab33ce7 [ 754.232914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg e6653f7479864a8e83673caa6d523360 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.240732] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6653f7479864a8e83673caa6d523360 [ 754.319390] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.319915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 593a4b1f3a714e5abbede15fb051affa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.332913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 593a4b1f3a714e5abbede15fb051affa [ 754.333589] env[61594]: DEBUG oslo_concurrency.lockutils 
[None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.333989] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 754.334202] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 754.337139] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31685a81-9617-4ed3-9035-c690067f2890 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.347836] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052b8f2d-1733-4482-8dab-e9242863520d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.379661] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5efb5557-deab-4eec-ac5a-7aabb4477c61 could not be found. [ 754.379917] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 754.380141] env[61594]: INFO nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Took 0.05 seconds to destroy the instance on the hypervisor. [ 754.380645] env[61594]: DEBUG oslo.service.loopingcall [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.380645] env[61594]: DEBUG nova.compute.manager [-] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 754.380805] env[61594]: DEBUG nova.network.neutron [-] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 754.386283] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7bcb8e4-1229-4ad3-aafc-906c1627da5d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.395290] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70ce245-69e6-4e46-b7b5-b11666ebcf60 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.432467] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e59b0ce-b463-4d77-8644-d93e5cd99914 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.440445] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a8793a-7f26-42b4-a350-b6c83099ca9f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.458379] env[61594]: DEBUG nova.compute.provider_tree [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.459135] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg beec6af1719847068b1d1278de512a1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.471598] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg beec6af1719847068b1d1278de512a1d [ 754.472281] env[61594]: DEBUG nova.scheduler.client.report [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 754.475358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 18066c7aa09e440386efa5c52f43f77a in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.498965] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18066c7aa09e440386efa5c52f43f77a [ 754.500064] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.315s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.500483] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 754.502477] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 2c2cd6af57904f67b25c0c8b10ad1a05 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.554490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c2cd6af57904f67b25c0c8b10ad1a05 [ 754.556381] env[61594]: DEBUG nova.compute.utils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 754.557544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 54e3c4ebcef748868bf926c6cc95e391 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.559134] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 754.559385] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 754.578131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54e3c4ebcef748868bf926c6cc95e391 [ 754.578829] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 754.582183] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg acf18d73266c4ea49cad855f727554b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.624245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acf18d73266c4ea49cad855f727554b8 [ 754.627993] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 4be1b2fd41464b1e81b565d65557a30c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.663542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4be1b2fd41464b1e81b565d65557a30c [ 754.664871] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 754.668325] env[61594]: DEBUG nova.network.neutron [-] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.668717] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d46128f7ee734a29947b6c2fd1b956a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.679721] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d46128f7ee734a29947b6c2fd1b956a1 [ 754.680088] env[61594]: DEBUG nova.network.neutron [-] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.680441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cf86efd90884459ab1f5691e3db2acff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.689127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf86efd90884459ab1f5691e3db2acff [ 754.689599] env[61594]: INFO nova.compute.manager [-] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Took 0.31 seconds to deallocate network for instance. 
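The recurring "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" lines in this trace come from oslo.concurrency's lockutils, which wraps critical sections such as the resource-tracker claim. A small sketch of that usage pattern, assuming only that oslo.concurrency is installed; the lock name and function here are illustrative, not Nova's actual code.

import time

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # While this function runs, lockutils logs DEBUG messages recording how
    # long the caller waited for the lock and how long it was held, which is
    # what the "waited 0.000s" / "held 0.315s" entries above reflect.
    time.sleep(0.1)
    return instance_uuid

claim_resources('de519927-4dbd-4e6b-ba0a-d02d01950749')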
[ 754.692069] env[61594]: DEBUG nova.compute.claims [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 754.692261] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 754.692510] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 754.694483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg d0e8ae97b4dd4ceebdd4b9ed2beb6ed6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.704365] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 754.704596] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 754.704778] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 754.704928] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 754.705093] 
env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 754.705242] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 754.705505] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 754.705702] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 754.705872] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 754.706051] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 754.706230] env[61594]: DEBUG nova.virt.hardware [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.707096] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9fa464b-ba3e-406d-bffc-7835145c9aeb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.715653] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e92e8a4-ea77-42a4-9156-a300de3db293 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.734982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0e8ae97b4dd4ceebdd4b9ed2beb6ed6 [ 754.842488] env[61594]: DEBUG nova.policy [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38ed2b39a2d0462095de83291bca4587', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': '19b2170b4bde4207b5a774f1cd266796', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 754.887676] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd882dc-7307-4be2-a4ce-a0179d1aa16d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.896205] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adc519b-d178-41f7-bb5f-9ed231167360 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.931781] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef19f08e-381a-4d65-b3c6-4de574dfd179 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.940139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c2bfc67-7234-4f4d-aa06-93a2c45fbd42 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.954362] env[61594]: DEBUG nova.compute.provider_tree [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.955168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 3c0fe8505662409e92ee2d5107682416 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.966318] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c0fe8505662409e92ee2d5107682416 [ 754.966930] env[61594]: DEBUG nova.scheduler.client.report [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 754.969539] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg cdb0a621e4ec4c658e3e45eca7dba350 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 754.986533] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdb0a621e4ec4c658e3e45eca7dba350 [ 754.987369] env[61594]: DEBUG oslo_concurrency.lockutils 
[None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.295s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 754.988412] env[61594]: ERROR nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Traceback (most recent call last): [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.driver.spawn(context, instance, image_meta, [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] vm_ref = self.build_virtual_machine(instance, [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] vif_infos = vmwarevif.get_vif_info(self._session, [ 754.988412] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] for vif in network_info: [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self._sync_wrapper(fn, *args, **kwargs) [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.wait() [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self[:] = self._gt.wait() [ 754.988729] env[61594]: ERROR nova.compute.manager 
[instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self._exit_event.wait() [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] result = hub.switch() [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 754.988729] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return self.greenlet.switch() [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] result = function(*args, **kwargs) [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] return func(*args, **kwargs) [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise e [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] nwinfo = self.network_api.allocate_for_instance( [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] created_port_ids = self._update_ports_for_instance( [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] with excutils.save_and_reraise_exception(): [ 754.989050] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] self.force_reraise() [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise self.value [ 
754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] updated_port = self._update_port( [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] _ensure_no_port_binding_failure(port) [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] raise exception.PortBindingFailed(port_id=port['id']) [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] nova.exception.PortBindingFailed: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. [ 754.989350] env[61594]: ERROR nova.compute.manager [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] [ 754.989608] env[61594]: DEBUG nova.compute.utils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 754.991750] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Build of instance 5efb5557-deab-4eec-ac5a-7aabb4477c61 was re-scheduled: Binding failed for port 5084e7ba-5652-4703-8329-ca8f37da9c52, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 754.992297] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 754.992751] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 754.992751] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.992914] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 754.993330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 289126399d8e4178ab57fb7b717fbd3b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.002556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 289126399d8e4178ab57fb7b717fbd3b [ 755.073566] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.525293] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.526248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg dc93626cb46b4a659bde3a343b36168b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.540106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc93626cb46b4a659bde3a343b36168b [ 755.540977] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-5efb5557-deab-4eec-ac5a-7aabb4477c61" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 755.541103] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 755.541817] env[61594]: DEBUG nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 755.541864] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 755.617405] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.619081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 0d67b7b07993476da2ed28c6f42c7ea9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.629727] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d67b7b07993476da2ed28c6f42c7ea9 [ 755.630396] env[61594]: DEBUG nova.network.neutron [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.631382] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 1ae03c7dbd6d48abbf33e8ae7182d88b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.647554] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ae03c7dbd6d48abbf33e8ae7182d88b [ 755.648305] env[61594]: INFO nova.compute.manager [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 5efb5557-deab-4eec-ac5a-7aabb4477c61] Took 0.11 seconds to deallocate network for instance. [ 755.650040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 573d89137c5f4fa580703af97bf77ac9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.701285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 573d89137c5f4fa580703af97bf77ac9 [ 755.704154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a5e7c02fa15d474582293d0cd8ce5c15 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.748810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5e7c02fa15d474582293d0cd8ce5c15 [ 755.781254] env[61594]: INFO nova.scheduler.client.report [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Deleted allocations for instance 5efb5557-deab-4eec-ac5a-7aabb4477c61 [ 755.792590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 434276f7b5004153a5990523eb9ecdfa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.812459] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 434276f7b5004153a5990523eb9ecdfa [ 755.813140] env[61594]: DEBUG oslo_concurrency.lockutils [None req-af44bf33-46f6-40c1-8f64-f891e9f36c52 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock 
"5efb5557-deab-4eec-ac5a-7aabb4477c61" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.997s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.899979] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "3d6ead4a-228d-48da-89b3-4fa99f6299d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.899979] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "3d6ead4a-228d-48da-89b3-4fa99f6299d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.901287] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 6f2fed641b0645c1b22987582458100d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.914971] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f2fed641b0645c1b22987582458100d [ 755.915771] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 755.917445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg a1d3d5246f17461291f1d675cff7be14 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 755.946123] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Successfully created port: cfc56a70-5dc7-481d-ab71-f33ec5765463 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.967619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1d3d5246f17461291f1d675cff7be14 [ 755.986190] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.987176] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.989735] env[61594]: INFO nova.compute.claims [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.992033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 36e18d0e7b824a48bd47d9daf6544868 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.041600] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36e18d0e7b824a48bd47d9daf6544868 [ 756.044240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg aa8efaa6a0384ce48c3e036f196a7df8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.051633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa8efaa6a0384ce48c3e036f196a7df8 [ 756.194130] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774d7b47-0289-400a-ac66-15d2726692ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.205150] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304ef93e-47b0-49ea-a654-1478d9589d6f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
756.237893] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e86cfc-dee9-4617-ad1b-f18dc9f83160 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.245813] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d0b941-0873-4bf2-ab34-a86f7260e01b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.261310] env[61594]: DEBUG nova.compute.provider_tree [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.261658] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 2c563e54afc94e7eb0642938baf7f267 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.271667] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c563e54afc94e7eb0642938baf7f267 [ 756.272707] env[61594]: DEBUG nova.scheduler.client.report [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 756.275226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 40fc28b9952f4503b0a23360ee843c73 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.286688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40fc28b9952f4503b0a23360ee843c73 [ 756.287448] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 756.287929] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 756.289671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg a8b300e41b374048a9f36850c4dd061b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.332146] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8b300e41b374048a9f36850c4dd061b [ 756.334463] env[61594]: DEBUG nova.compute.utils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 756.335251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 40d9e168f6c44a3e9bc0336abe8e2377 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.336946] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 756.336946] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 756.349697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40d9e168f6c44a3e9bc0336abe8e2377 [ 756.350460] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 756.352352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg ea17a81d2cbd4c77b0e38dc6128219fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.406851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea17a81d2cbd4c77b0e38dc6128219fa [ 756.410351] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 3e84fcad9ca04a228b055799a774d4f6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.449027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e84fcad9ca04a228b055799a774d4f6 [ 756.449027] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 756.474218] env[61594]: ERROR nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 756.474218] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.474218] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.474218] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.474218] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.474218] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.474218] env[61594]: ERROR nova.compute.manager raise self.value [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.474218] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.474218] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.474218] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.474719] env[61594]: ERROR nova.compute.manager File 
"/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.474719] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.474719] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 756.474719] env[61594]: ERROR nova.compute.manager [ 756.474719] env[61594]: Traceback (most recent call last): [ 756.474719] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.474719] env[61594]: listener.cb(fileno) [ 756.474719] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 756.474719] env[61594]: result = function(*args, **kwargs) [ 756.474719] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.474719] env[61594]: return func(*args, **kwargs) [ 756.474719] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 756.474719] env[61594]: raise e [ 756.474719] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.474719] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 756.474719] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.474719] env[61594]: created_port_ids = self._update_ports_for_instance( [ 756.474719] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.474719] env[61594]: with excutils.save_and_reraise_exception(): [ 756.474719] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.474719] env[61594]: self.force_reraise() [ 756.474719] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.474719] env[61594]: raise self.value [ 756.474719] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.474719] env[61594]: updated_port = self._update_port( [ 756.474719] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.474719] env[61594]: _ensure_no_port_binding_failure(port) [ 756.474719] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.474719] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.475560] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 756.475560] env[61594]: Removing descriptor: 17 [ 756.476455] env[61594]: ERROR nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. 
[ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Traceback (most recent call last): [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] yield resources [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.driver.spawn(context, instance, image_meta, [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] vm_ref = self.build_virtual_machine(instance, [ 756.476455] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] for vif in network_info: [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self._sync_wrapper(fn, *args, **kwargs) [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.wait() [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self[:] = self._gt.wait() [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self._exit_event.wait() [ 756.476830] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.476830] env[61594]: ERROR 
nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] result = hub.switch() [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self.greenlet.switch() [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] result = function(*args, **kwargs) [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return func(*args, **kwargs) [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise e [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] nwinfo = self.network_api.allocate_for_instance( [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] created_port_ids = self._update_ports_for_instance( [ 756.477359] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] with excutils.save_and_reraise_exception(): [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.force_reraise() [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise self.value [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] updated_port = self._update_port( [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.477763] 
env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] _ensure_no_port_binding_failure(port) [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise exception.PortBindingFailed(port_id=port['id']) [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 756.477763] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] [ 756.480034] env[61594]: INFO nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Terminating instance [ 756.482304] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.482304] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.482304] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.482304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a17ba9b5a1644d25851c1617b9ffd181 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.486388] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:563}} [ 756.487147] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 756.487479] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.487771] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 756.488050] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.488312] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 756.488612] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 756.488884] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 756.489185] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 756.489460] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 756.489731] env[61594]: DEBUG nova.virt.hardware [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 756.490695] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6f6cc978-c667-4e38-911d-df6784f69a06 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.494177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a17ba9b5a1644d25851c1617b9ffd181 [ 756.502112] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2be276b-2446-4855-8de1-9f089532e31d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.539028] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Successfully created port: ff3cb204-5637-44ad-955a-535d5ce72a74 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.577914] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.581356] env[61594]: ERROR nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. [ 756.581356] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.581356] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.581356] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.581356] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.581356] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.581356] env[61594]: ERROR nova.compute.manager raise self.value [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.581356] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 756.581356] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.581356] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 756.581832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 756.581832] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 756.581832] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. [ 756.581832] env[61594]: ERROR nova.compute.manager [ 756.581832] env[61594]: Traceback (most recent call last): [ 756.581832] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 756.581832] env[61594]: listener.cb(fileno) [ 756.581832] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 756.581832] env[61594]: result = function(*args, **kwargs) [ 756.581832] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.581832] env[61594]: return func(*args, **kwargs) [ 756.581832] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 756.581832] env[61594]: raise e [ 756.581832] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.581832] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 756.581832] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.581832] env[61594]: created_port_ids = self._update_ports_for_instance( [ 756.581832] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.581832] env[61594]: with excutils.save_and_reraise_exception(): [ 756.581832] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.581832] env[61594]: self.force_reraise() [ 756.581832] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.581832] env[61594]: raise self.value [ 756.581832] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.581832] env[61594]: updated_port = self._update_port( [ 756.581832] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.581832] env[61594]: _ensure_no_port_binding_failure(port) [ 756.581832] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.581832] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 756.582590] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. [ 756.582590] env[61594]: Removing descriptor: 21 [ 756.582590] env[61594]: ERROR nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. 
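Both failed builds in this stretch (ports 010a6d62-e081-47f2-83de-049e88757d9d and 8d3bba46-c3ed-46dc-b832-da5d946bb2f5) walk through the same frames: _update_ports_for_instance wraps the port update in oslo_utils.excutils.save_and_reraise_exception(), so partial work can be rolled back while the original PortBindingFailed is preserved and re-raised, which is what the force_reraise() frames show. A minimal illustration of that pattern (update_port and the rollback print are hypothetical stand-ins, not Nova code):

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def update_port(port_id):
        # Stand-in for the Neutron call that failed in the log above.
        raise PortBindingFailed("Binding failed for port %s" % port_id)

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                update_port(port_id)
                created.append(port_id)
            except Exception:
                # Clean up what was already created, then let the context
                # manager re-raise the original exception unchanged.
                with excutils.save_and_reraise_exception():
                    for done in created:
                        print("rolling back port %s" % done)

    try:
        update_ports_for_instance(['8d3bba46-c3ed-46dc-b832-da5d946bb2f5'])
    except PortBindingFailed as exc:
        print("build fails with the original error: %s" % exc)

The per-instance traceback that follows for 807b279c-5934-4b4b-977d-c02a8dcbbdb6 is the spawn side of the same failure, after which the manager moves on to terminating the instance and tearing its network state back down.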
[ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Traceback (most recent call last): [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] yield resources [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.driver.spawn(context, instance, image_meta, [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 756.582590] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] vm_ref = self.build_virtual_machine(instance, [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] vif_infos = vmwarevif.get_vif_info(self._session, [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] for vif in network_info: [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self._sync_wrapper(fn, *args, **kwargs) [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.wait() [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self[:] = self._gt.wait() [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self._exit_event.wait() [ 756.582988] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 756.583349] env[61594]: ERROR 
nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] result = hub.switch() [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self.greenlet.switch() [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] result = function(*args, **kwargs) [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return func(*args, **kwargs) [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise e [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] nwinfo = self.network_api.allocate_for_instance( [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 756.583349] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] created_port_ids = self._update_ports_for_instance( [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] with excutils.save_and_reraise_exception(): [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.force_reraise() [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise self.value [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] updated_port = self._update_port( [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 756.583719] 
env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] _ensure_no_port_binding_failure(port) [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 756.583719] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise exception.PortBindingFailed(port_id=port['id']) [ 756.584780] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. [ 756.584780] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] [ 756.584780] env[61594]: INFO nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Terminating instance [ 756.584780] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquiring lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.584780] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquired lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.584917] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.585306] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg df3b2618b6e04fbe96109307e73f0b79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.595829] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df3b2618b6e04fbe96109307e73f0b79 [ 756.621160] env[61594]: DEBUG nova.policy [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06291742abbe4dee8092fba657b8ab91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b54f5acf42c64133afbb208929492c31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 756.672017] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 
tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.970223] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.970223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 37db9ef42e774a6bbc53c81701c6879b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 756.982592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37db9ef42e774a6bbc53c81701c6879b [ 756.983251] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Releasing lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 756.983659] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 756.983851] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 756.984380] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6bfa628d-547a-4214-ad3f-d13c18a0aa71 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.995147] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e9bbf6-d554-4438-8a6e-858dcd5489cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.019918] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 807b279c-5934-4b4b-977d-c02a8dcbbdb6 could not be found. 
[ 757.020184] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 757.020385] env[61594]: INFO nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 757.020636] env[61594]: DEBUG oslo.service.loopingcall [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.020844] env[61594]: DEBUG nova.compute.manager [-] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 757.020938] env[61594]: DEBUG nova.network.neutron [-] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 757.269648] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.271776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c18dac51b88240fcb4a6d9f1fa566fce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.285010] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c18dac51b88240fcb4a6d9f1fa566fce [ 757.285754] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.286177] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 757.287095] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 757.287806] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-753ea7e8-e47d-4f61-a7f4-6ab7e12d4349 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.294868] env[61594]: DEBUG nova.network.neutron [-] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.295852] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg da4e71a44bd74587a73c7ee68a3a0c29 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.301575] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292a5485-8f3e-4f2d-bbd8-8feb64aba3d6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.315144] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da4e71a44bd74587a73c7ee68a3a0c29 [ 757.315919] env[61594]: DEBUG nova.network.neutron [-] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.317286] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 72fffdaf481d438eb0c3641866b34aac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.330409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72fffdaf481d438eb0c3641866b34aac [ 757.331461] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 51ae3e83-1cf3-4f56-b48d-b436ae84d706 could not be found. [ 757.331683] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 757.332078] env[61594]: INFO nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Took 0.05 seconds to destroy the instance on the hypervisor. [ 757.332537] env[61594]: DEBUG oslo.service.loopingcall [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 757.332997] env[61594]: INFO nova.compute.manager [-] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Took 0.31 seconds to deallocate network for instance. [ 757.334111] env[61594]: DEBUG nova.compute.manager [-] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 757.334111] env[61594]: DEBUG nova.network.neutron [-] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 757.338254] env[61594]: DEBUG nova.compute.claims [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 757.338532] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.339140] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.343472] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 3c8febf122ce4dd48abec052c75cc62a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.384281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c8febf122ce4dd48abec052c75cc62a [ 757.400969] env[61594]: DEBUG nova.network.neutron [-] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.401983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3de0ae1711474a5aadfcbc9b3a4db2dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.425876] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3de0ae1711474a5aadfcbc9b3a4db2dc [ 757.426405] env[61594]: DEBUG nova.network.neutron [-] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.426763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7b2238f04bf34110896dbcfaa92433b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.437058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b2238f04bf34110896dbcfaa92433b7 [ 757.437353] env[61594]: INFO nova.compute.manager [-] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Took 0.10 seconds to deallocate network for instance. [ 757.439863] env[61594]: DEBUG nova.compute.claims [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 757.440080] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.552497] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquiring lock "f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.553043] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.553608] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 3ad9b72643e9480e816549540d37d023 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.556049] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquiring lock "27788608-fef5-4163-932d-be6e2f60a541" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.556049] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "27788608-fef5-4163-932d-be6e2f60a541" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.556049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg a9e457f28a8c4cffb1ad7441e02f674e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.566839] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9e457f28a8c4cffb1ad7441e02f674e [ 757.566839] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ad9b72643e9480e816549540d37d023 [ 757.567138] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 757.569171] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 3334a3c4d48d4350a2427adf6167e2f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.570203] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 757.571970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg c7a6c0169df845a8903171336b9caffc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.574841] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c73f162-c6a9-4d95-9d28-341e66a20e07 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.583429] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e435c1a-1246-4a19-98ed-63974db6f68e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.616595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3334a3c4d48d4350a2427adf6167e2f1 [ 757.616595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7a6c0169df845a8903171336b9caffc [ 757.621136] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2313b56-f756-4671-b837-85fb5425501f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.633073] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-084e80d7-5a0d-4a70-9ad3-120b61940a6f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.638766] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.650188] env[61594]: DEBUG nova.compute.provider_tree [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.650685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 7d5e7a3e1e1f4c41918b7fbb04694cb3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.652314] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 757.659770] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d5e7a3e1e1f4c41918b7fbb04694cb3 [ 757.660451] env[61594]: DEBUG nova.scheduler.client.report [None 
req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 757.662811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 4854599578be458aa7c6db9485bfdea0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.679471] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4854599578be458aa7c6db9485bfdea0 [ 757.680357] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.680959] env[61594]: ERROR nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. 
[ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Traceback (most recent call last): [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.driver.spawn(context, instance, image_meta, [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] vm_ref = self.build_virtual_machine(instance, [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.680959] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] for vif in network_info: [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self._sync_wrapper(fn, *args, **kwargs) [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.wait() [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self[:] = self._gt.wait() [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self._exit_event.wait() [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] result = hub.switch() [ 757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
757.681770] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return self.greenlet.switch() [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] result = function(*args, **kwargs) [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] return func(*args, **kwargs) [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise e [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] nwinfo = self.network_api.allocate_for_instance( [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] created_port_ids = self._update_ports_for_instance( [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] with excutils.save_and_reraise_exception(): [ 757.682589] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] self.force_reraise() [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise self.value [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] updated_port = self._update_port( [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] _ensure_no_port_binding_failure(port) [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] raise exception.PortBindingFailed(port_id=port['id']) [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] nova.exception.PortBindingFailed: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. [ 757.683939] env[61594]: ERROR nova.compute.manager [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] [ 757.685209] env[61594]: DEBUG nova.compute.utils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.685209] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.243s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.685209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg e14ee671cc8743ad86962ea64c7d1bab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.685890] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Build of instance 807b279c-5934-4b4b-977d-c02a8dcbbdb6 was re-scheduled: Binding failed for port 8d3bba46-c3ed-46dc-b832-da5d946bb2f5, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 757.686344] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 757.686567] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquiring lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.686713] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Acquired lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.686873] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.687231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg d2ae81af029f477293f4ba3b3c47ad37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.696714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2ae81af029f477293f4ba3b3c47ad37 [ 757.729804] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e14ee671cc8743ad86962ea64c7d1bab [ 757.741852] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.888692] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9164733-1124-4045-b2bc-e83ca9786e6c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.897464] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa3cc2c9-9bb9-47eb-9b31-ad39fd6ab923 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.930039] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb8a59a-88d1-467f-92b4-a850fb1bf3d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.936512] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7d78fe9-8949-4f9f-ba14-935c295cc968 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.953515] env[61594]: DEBUG nova.compute.provider_tree [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.954185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 7420ffa5af1e469e925cd7f916f67e59 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.965484] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7420ffa5af1e469e925cd7f916f67e59 [ 757.966628] env[61594]: DEBUG nova.scheduler.client.report [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 757.969387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 7a5c600fe1f643ba93f807bae3fbd711 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.986135] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a5c600fe1f643ba93f807bae3fbd711 [ 757.987385] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.987924] env[61594]: ERROR nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Traceback (most recent call last): [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.driver.spawn(context, instance, image_meta, [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] vm_ref = self.build_virtual_machine(instance, [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.987924] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] for vif in network_info: [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self._sync_wrapper(fn, *args, **kwargs) [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.wait() [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self[:] = self._gt.wait() [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self._exit_event.wait() [ 757.988274] env[61594]: 
ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] result = hub.switch() [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 757.988274] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return self.greenlet.switch() [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] result = function(*args, **kwargs) [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] return func(*args, **kwargs) [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise e [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] nwinfo = self.network_api.allocate_for_instance( [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] created_port_ids = self._update_ports_for_instance( [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] with excutils.save_and_reraise_exception(): [ 757.988613] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] self.force_reraise() [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise self.value [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] updated_port = 
self._update_port( [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] _ensure_no_port_binding_failure(port) [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] raise exception.PortBindingFailed(port_id=port['id']) [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] nova.exception.PortBindingFailed: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. [ 757.988923] env[61594]: ERROR nova.compute.manager [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] [ 757.989214] env[61594]: DEBUG nova.compute.utils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.990252] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.352s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.991896] env[61594]: INFO nova.compute.claims [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.993649] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 838b4182fb334bc4bc2f39390a162fac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 757.995300] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Build of instance 51ae3e83-1cf3-4f56-b48d-b436ae84d706 was re-scheduled: Binding failed for port 010a6d62-e081-47f2-83de-049e88757d9d, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 757.995848] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 757.996125] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.996348] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.996632] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.997083] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c3d313f24f6a4ae7bdba1e6b9be6f3f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.002038] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.002403] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.003671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3d313f24f6a4ae7bdba1e6b9be6f3f7 [ 758.020413] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.020918] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 
24d8846f82594ab697543068e85f880d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.032980] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24d8846f82594ab697543068e85f880d [ 758.033594] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Releasing lock "refresh_cache-807b279c-5934-4b4b-977d-c02a8dcbbdb6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.033934] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 758.034012] env[61594]: DEBUG nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 758.034144] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 758.044897] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 838b4182fb334bc4bc2f39390a162fac [ 758.046738] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 073f26a4dddc4d47a9fbd671ffef7f70 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.054966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 073f26a4dddc4d47a9fbd671ffef7f70 [ 758.080138] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.105544] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.106184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 185c254b11e04ffab85ffe993bb97bae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.114936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 185c254b11e04ffab85ffe993bb97bae [ 758.115487] env[61594]: DEBUG nova.network.neutron [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.115950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 65889b96bec7452f91402a1d202dae5e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.128604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65889b96bec7452f91402a1d202dae5e [ 758.129305] env[61594]: INFO nova.compute.manager [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] [instance: 807b279c-5934-4b4b-977d-c02a8dcbbdb6] Took 0.09 seconds to deallocate network for instance. [ 758.131622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 3e75926b485043a7bcd9b28d9643c35f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.170360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e75926b485043a7bcd9b28d9643c35f [ 758.173068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg 72e1e992afd44f80b152632daeff98ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.216701] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72e1e992afd44f80b152632daeff98ae [ 758.240067] env[61594]: INFO nova.scheduler.client.report [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Deleted allocations for instance 807b279c-5934-4b4b-977d-c02a8dcbbdb6 [ 758.249028] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073b0852-2c72-4f33-ba5a-f1a902ce502f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.253179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Expecting reply to msg acee546473ee479b9121c74e0138e7a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.257260] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-84dc3c69-e0bb-424a-8ed8-85f06dd0aab6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.291888] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acee546473ee479b9121c74e0138e7a5 [ 758.292734] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d612945-e56a-4220-bed8-9dc166b58f9b tempest-ServerDiagnosticsTest-448951502 tempest-ServerDiagnosticsTest-448951502-project-member] Lock "807b279c-5934-4b4b-977d-c02a8dcbbdb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.007s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.293452] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a3039c-b78b-4af8-b228-9cf6c872e26b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.296487] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 192ecb2fd0b743c4b258d6907a0f8b13 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.303478] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fc7756-40e2-4480-8467-e8dc2c1c860d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.309402] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 192ecb2fd0b743c4b258d6907a0f8b13 [ 758.309904] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 758.311941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 20bde83834d1485fb9df88db4de2f22d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.325027] env[61594]: DEBUG nova.compute.provider_tree [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.325350] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 36842c8d6d6f4b8c9005f19c22d54a03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.334559] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36842c8d6d6f4b8c9005f19c22d54a03 [ 758.335647] env[61594]: DEBUG nova.scheduler.client.report [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 758.338225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg ebb82a901ca449ef94494c7deb6c717b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.362205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebb82a901ca449ef94494c7deb6c717b [ 758.363261] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.373s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.366018] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 758.366018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg d7293d487a9e498cb4c97fbca025360f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.367543] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.715s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.369535] env[61594]: INFO nova.compute.claims [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.372359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 3c29fbc15b504f17b1c24bc71ef04450 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.374073] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20bde83834d1485fb9df88db4de2f22d [ 758.402383] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 758.440761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7293d487a9e498cb4c97fbca025360f [ 758.442354] env[61594]: DEBUG nova.compute.utils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.443136] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 08eb102712ac4db6b01a76cde8c7cad6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.444224] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c29fbc15b504f17b1c24bc71ef04450 [ 758.444688] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 758.444874] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 758.448895] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg b2fee1ed0b4848a895ad5a0920c588f6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.457528] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08eb102712ac4db6b01a76cde8c7cad6 [ 758.458146] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 758.463019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 46fae250cb95495f9a2a12a831513898 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.463019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b2fee1ed0b4848a895ad5a0920c588f6 [ 758.480981] env[61594]: ERROR nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. 
[ 758.480981] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 758.480981] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.480981] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.480981] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.480981] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.480981] env[61594]: ERROR nova.compute.manager raise self.value [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.480981] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 758.480981] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.480981] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 758.481621] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.481621] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 758.481621] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. 
[ 758.481621] env[61594]: ERROR nova.compute.manager [ 758.481621] env[61594]: Traceback (most recent call last): [ 758.481621] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 758.481621] env[61594]: listener.cb(fileno) [ 758.481621] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 758.481621] env[61594]: result = function(*args, **kwargs) [ 758.481621] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 758.481621] env[61594]: return func(*args, **kwargs) [ 758.481621] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 758.481621] env[61594]: raise e [ 758.481621] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 758.481621] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 758.481621] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.481621] env[61594]: created_port_ids = self._update_ports_for_instance( [ 758.481621] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.481621] env[61594]: with excutils.save_and_reraise_exception(): [ 758.481621] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.481621] env[61594]: self.force_reraise() [ 758.481621] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.481621] env[61594]: raise self.value [ 758.481621] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.481621] env[61594]: updated_port = self._update_port( [ 758.481621] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.481621] env[61594]: _ensure_no_port_binding_failure(port) [ 758.481621] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.481621] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 758.482953] env[61594]: nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. [ 758.482953] env[61594]: Removing descriptor: 25 [ 758.482953] env[61594]: ERROR nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. 
[ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] Traceback (most recent call last): [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] yield resources [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.driver.spawn(context, instance, image_meta, [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 758.482953] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] vm_ref = self.build_virtual_machine(instance, [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] vif_infos = vmwarevif.get_vif_info(self._session, [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] for vif in network_info: [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self._sync_wrapper(fn, *args, **kwargs) [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.wait() [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self[:] = self._gt.wait() [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self._exit_event.wait() [ 758.483267] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 758.483583] env[61594]: ERROR 
nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] result = hub.switch() [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self.greenlet.switch() [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] result = function(*args, **kwargs) [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return func(*args, **kwargs) [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise e [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] nwinfo = self.network_api.allocate_for_instance( [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 758.483583] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] created_port_ids = self._update_ports_for_instance( [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] with excutils.save_and_reraise_exception(): [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.force_reraise() [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise self.value [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] updated_port = self._update_port( [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 758.483952] 
env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] _ensure_no_port_binding_failure(port) [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 758.483952] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise exception.PortBindingFailed(port_id=port['id']) [ 758.484308] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. [ 758.484308] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] [ 758.484308] env[61594]: INFO nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Terminating instance [ 758.485160] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquiring lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 758.485160] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquired lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.485277] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.485662] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 59d428e8ba074d35bf3b52e48784a2ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.498880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59d428e8ba074d35bf3b52e48784a2ae [ 758.523451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46fae250cb95495f9a2a12a831513898 [ 758.526066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 95d81b5e3d6047db99c0352ae28b6ad8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.556334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95d81b5e3d6047db99c0352ae28b6ad8 [ 758.557810] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: 
f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 758.566462] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.592801] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.593463] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.593463] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.593595] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.593658] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.593822] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.594070] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe 
tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.594268] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.594447] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.594610] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.594779] env[61594]: DEBUG nova.virt.hardware [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.595696] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5644a9e7-405e-4ee9-a9d9-dba6b59ee8e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.606910] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127e6afe-8ef3-4cea-99f1-75dac925e6e1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.668288] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb38af4-55a6-42c1-9dbf-4822befb9626 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.676401] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc97082b-8a67-47ec-9aeb-c159f22c8054 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.709454] env[61594]: DEBUG nova.policy [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0e5924ff92d443cd9206bdfd82447903', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c18723dc952a4233846634205ce5bc23', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 758.713443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ff5440-b4d2-4d78-b413-8bdce0adf136 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.721469] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca175eb1-19da-46a5-9e3f-552aef8a4555 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.736171] env[61594]: DEBUG nova.compute.provider_tree [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.736612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg f1f851bd04494516926deab0c4b4b58f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.745580] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1f851bd04494516926deab0c4b4b58f [ 758.746602] env[61594]: DEBUG nova.scheduler.client.report [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 758.751359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg b20c2f13785846529f7d14c799c4c565 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.762310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b20c2f13785846529f7d14c799c4c565 [ 758.763422] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.396s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 758.763882] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 758.767136] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 331a70b0bcc54c60811ab03c71fe41e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.767136] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.364s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 758.768050] env[61594]: INFO nova.compute.claims [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 758.769609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 9656a5ac7e9340d288eeca9e0833a175 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.814026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 331a70b0bcc54c60811ab03c71fe41e6 [ 758.814026] env[61594]: DEBUG nova.compute.utils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 758.814026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg fda98738d5f0486ba943eb885db7f023 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.815135] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.815449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 0dbcbe3590e44c459bb368d4597b3926 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.816206] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 758.816873] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 758.818884] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9656a5ac7e9340d288eeca9e0833a175 [ 758.820574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 7dd65f3d64dc454f88d637de2b7cd4d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.826592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dbcbe3590e44c459bb368d4597b3926 [ 758.827113] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-51ae3e83-1cf3-4f56-b48d-b436ae84d706" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.827628] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 758.827628] env[61594]: DEBUG nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 758.827965] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 758.830064] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fda98738d5f0486ba943eb885db7f023 [ 758.830532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd65f3d64dc454f88d637de2b7cd4d5 [ 758.831322] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 758.832790] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 1b4ddb46b920419fbeccaa7c42929a7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.868614] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b4ddb46b920419fbeccaa7c42929a7e [ 758.872015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg d5bf093ffd6d497388e81bb6952e0e03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.875813] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Successfully created port: dd2bb230-848c-4e9a-8343-59436d2fbda8 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.907778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5bf093ffd6d497388e81bb6952e0e03 [ 758.909277] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.910026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 15f9ee84785b4bd58b98a9bf1ecbde11 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.912220] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 758.918037] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15f9ee84785b4bd58b98a9bf1ecbde11 [ 758.918154] env[61594]: DEBUG nova.network.neutron [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.918589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg aff28b365453453f8dbbbc8178e7859e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.932501] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aff28b365453453f8dbbbc8178e7859e [ 758.933383] env[61594]: INFO nova.compute.manager [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 51ae3e83-1cf3-4f56-b48d-b436ae84d706] Took 0.11 seconds to deallocate network for instance. [ 758.935042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 9f424af1ff1e4398975075e866b842fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 758.948780] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 758.949053] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 758.949224] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 758.949749] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] 
Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 758.949749] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 758.949848] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 758.950546] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 758.951625] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 758.951625] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 758.951625] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 758.951625] env[61594]: DEBUG nova.virt.hardware [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 758.953163] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5699e3e-5058-4a33-a304-d6fada3e5424 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.968846] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9dd1561-5cbd-4686-9582-2ad10c7a6725 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.985899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f424af1ff1e4398975075e866b842fa [ 758.988752] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 472ea9cf76104a198069f84c321f8e58 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.025276] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.025674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 1fb14f7c20a746198d22316cde86fe7d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.029760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 472ea9cf76104a198069f84c321f8e58 [ 759.035643] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc73cb6a-3c5f-4433-a019-8b4408df33e0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.039064] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fb14f7c20a746198d22316cde86fe7d [ 759.042277] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Releasing lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.042277] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 759.042277] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 759.042277] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b81b39e-c611-445e-a463-32f3028e4a92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.048019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998d3a3d-0434-4ad3-bc5d-c037a2753f2f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.054300] env[61594]: DEBUG nova.policy [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cdda174a6ea54d4984293438287d97a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0ac54b53da9b4c9d99b34169cc521682', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 759.057602] env[61594]: ERROR nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. 
[ 759.057602] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.057602] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.057602] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.057602] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.057602] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.057602] env[61594]: ERROR nova.compute.manager raise self.value [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.057602] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 759.057602] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.057602] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 759.058094] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.058094] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 759.058094] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. 
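Note: the traceback above (and the greenthread-level copy that follows) shows where the failure is raised: allocate_for_instance -> _update_ports_for_instance -> _update_port -> _ensure_no_port_binding_failure at /opt/stack/nova/nova/network/neutron.py:294. A minimal, self-contained sketch of that final check, with the exception class stubbed and the constant inlined (simplified illustration, not the verbatim Nova source):

    # Sketch of the check referenced at nova/network/neutron.py:294 in the traceback.
    # Assumption: simplified; Nova imports PortBindingFailed from nova.exception and
    # the 'binding_failed' constant from nova.network.model.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type Neutron reports when ML2 could not bind the port

    def ensure_no_port_binding_failure(port):
        # 'port' is the dict Neutron returns from the port update call; a failed
        # binding surfaces as binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # Example with the failing port from this log:
    try:
        ensure_no_port_binding_failure({'id': '8dfc2970-8f62-41a9-be95-d63bee3b3b33',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as e:
        print(e)  # Binding failed for port 8dfc2970-..., please check neutron logs ...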
[ 759.058094] env[61594]: ERROR nova.compute.manager [ 759.058094] env[61594]: Traceback (most recent call last): [ 759.058094] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 759.058094] env[61594]: listener.cb(fileno) [ 759.058094] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 759.058094] env[61594]: result = function(*args, **kwargs) [ 759.058094] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.058094] env[61594]: return func(*args, **kwargs) [ 759.058094] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 759.058094] env[61594]: raise e [ 759.058094] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.058094] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 759.058094] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.058094] env[61594]: created_port_ids = self._update_ports_for_instance( [ 759.058094] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.058094] env[61594]: with excutils.save_and_reraise_exception(): [ 759.058094] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.058094] env[61594]: self.force_reraise() [ 759.058094] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.058094] env[61594]: raise self.value [ 759.058094] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.058094] env[61594]: updated_port = self._update_port( [ 759.058094] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.058094] env[61594]: _ensure_no_port_binding_failure(port) [ 759.058094] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.058094] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 759.058898] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. [ 759.058898] env[61594]: Removing descriptor: 23 [ 759.059176] env[61594]: INFO nova.scheduler.client.report [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance 51ae3e83-1cf3-4f56-b48d-b436ae84d706 [ 759.071108] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b440b1-0047-476f-bf17-f971d8ceb2e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.084971] env[61594]: ERROR nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. 
[ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Traceback (most recent call last): [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] yield resources [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.driver.spawn(context, instance, image_meta, [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] vm_ref = self.build_virtual_machine(instance, [ 759.084971] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] for vif in network_info: [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self._sync_wrapper(fn, *args, **kwargs) [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.wait() [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self[:] = self._gt.wait() [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self._exit_event.wait() [ 759.086084] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.086084] env[61594]: ERROR 
nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] result = hub.switch() [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self.greenlet.switch() [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] result = function(*args, **kwargs) [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return func(*args, **kwargs) [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise e [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] nwinfo = self.network_api.allocate_for_instance( [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] created_port_ids = self._update_ports_for_instance( [ 759.086430] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] with excutils.save_and_reraise_exception(): [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.force_reraise() [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise self.value [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] updated_port = self._update_port( [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.086774] 
env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] _ensure_no_port_binding_failure(port) [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise exception.PortBindingFailed(port_id=port['id']) [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. [ 759.086774] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] [ 759.087127] env[61594]: INFO nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Terminating instance [ 759.087898] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 15e0441c64ab417ea79508215a4744ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.088929] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquiring lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.089612] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquired lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.089612] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.089729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 9237ddb659fd490f859f81827e15fd84 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.116829] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9237ddb659fd490f859f81827e15fd84 [ 759.117373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15e0441c64ab417ea79508215a4744ec [ 759.120702] env[61594]: DEBUG oslo_concurrency.lockutils [None req-eaa2de39-c1c4-4100-903f-cc88de1d1446 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "51ae3e83-1cf3-4f56-b48d-b436ae84d706" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
14.830s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.121856] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da6aba44-24fe-422c-a1cd-ec097058bee6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.136040] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216efdea-12f1-482f-8cd9-188c80c919cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.141999] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 681fb733-cd37-4f73-a487-e4856206907f could not be found. [ 759.141999] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 759.141999] env[61594]: INFO nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Took 0.10 seconds to destroy the instance on the hypervisor. [ 759.141999] env[61594]: DEBUG oslo.service.loopingcall [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.141999] env[61594]: DEBUG nova.compute.manager [-] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 759.142228] env[61594]: DEBUG nova.network.neutron [-] [instance: 681fb733-cd37-4f73-a487-e4856206907f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.153029] env[61594]: DEBUG nova.compute.provider_tree [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.153029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 379d9df604b94128b05986b4c85327f0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.160783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 379d9df604b94128b05986b4c85327f0 [ 759.161754] env[61594]: DEBUG nova.scheduler.client.report [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 759.164145] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 22c57d25989d44599563ac5e3af3263c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.177740] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22c57d25989d44599563ac5e3af3263c [ 759.178629] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.412s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.179128] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 759.181185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg ad2995385f57400ebcdfc5fb29e45966 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.192203] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.226635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad2995385f57400ebcdfc5fb29e45966 [ 759.228672] env[61594]: DEBUG nova.compute.utils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 759.229275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg b61b7452a4334adfbb11767506a042c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.230344] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 759.241421] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b61b7452a4334adfbb11767506a042c5 [ 759.242022] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 759.243760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 587e7a3014514f10b8c3d3eab5c21de3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.251565] env[61594]: DEBUG nova.network.neutron [-] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.252047] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8f85f20dcad49469771d41425b8ed9a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.280366] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8f85f20dcad49469771d41425b8ed9a [ 759.280366] env[61594]: DEBUG nova.network.neutron [-] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.280366] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3ffce8874521420cbd7cab8800d76374 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.298046] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 587e7a3014514f10b8c3d3eab5c21de3 [ 759.298568] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ffce8874521420cbd7cab8800d76374 [ 759.304588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 7deafcde603e4b0aaf95d0b9b32846ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.304687] env[61594]: INFO nova.compute.manager [-] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Took 0.16 seconds to deallocate network for instance. [ 759.307723] env[61594]: DEBUG nova.compute.claims [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 759.307895] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 759.308148] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.309864] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 31c93a4e6a4447feaa32af8e4d5ae7a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.344873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31c93a4e6a4447feaa32af8e4d5ae7a7 [ 759.350581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7deafcde603e4b0aaf95d0b9b32846ff [ 759.351748] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 
07d2f1e7-c08e-434c-aea7-941ef75f16ba] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 759.359055] env[61594]: ERROR nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. [ 759.359055] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.359055] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.359055] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.359055] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.359055] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.359055] env[61594]: ERROR nova.compute.manager raise self.value [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.359055] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 759.359055] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.359055] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 759.359482] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.359482] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 759.359482] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. 
[ 759.359482] env[61594]: ERROR nova.compute.manager [ 759.359482] env[61594]: Traceback (most recent call last): [ 759.359482] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 759.359482] env[61594]: listener.cb(fileno) [ 759.359482] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 759.359482] env[61594]: result = function(*args, **kwargs) [ 759.359482] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.359482] env[61594]: return func(*args, **kwargs) [ 759.359482] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 759.359482] env[61594]: raise e [ 759.359482] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.359482] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 759.359482] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.359482] env[61594]: created_port_ids = self._update_ports_for_instance( [ 759.359482] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.359482] env[61594]: with excutils.save_and_reraise_exception(): [ 759.359482] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.359482] env[61594]: self.force_reraise() [ 759.359482] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.359482] env[61594]: raise self.value [ 759.359482] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.359482] env[61594]: updated_port = self._update_port( [ 759.359482] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.359482] env[61594]: _ensure_no_port_binding_failure(port) [ 759.359482] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.359482] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 759.360207] env[61594]: nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. [ 759.360207] env[61594]: Removing descriptor: 20 [ 759.360462] env[61594]: ERROR nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. 
[ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Traceback (most recent call last): [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] yield resources [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.driver.spawn(context, instance, image_meta, [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] vm_ref = self.build_virtual_machine(instance, [ 759.360462] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] for vif in network_info: [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self._sync_wrapper(fn, *args, **kwargs) [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.wait() [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self[:] = self._gt.wait() [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self._exit_event.wait() [ 759.360774] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.360774] env[61594]: ERROR 
nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] result = hub.switch() [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self.greenlet.switch() [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] result = function(*args, **kwargs) [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return func(*args, **kwargs) [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise e [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] nwinfo = self.network_api.allocate_for_instance( [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] created_port_ids = self._update_ports_for_instance( [ 759.361162] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] with excutils.save_and_reraise_exception(): [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.force_reraise() [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise self.value [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] updated_port = self._update_port( [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.361520] 
env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] _ensure_no_port_binding_failure(port) [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise exception.PortBindingFailed(port_id=port['id']) [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. [ 759.361520] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] [ 759.361867] env[61594]: INFO nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Terminating instance [ 759.366189] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.366347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.366519] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.366936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 531dfff335234a0fa203b2f4d6203950 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.382832] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 531dfff335234a0fa203b2f4d6203950 [ 759.396030] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 759.396030] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 759.396030] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.396256] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 759.396295] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.396727] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 759.396727] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 759.396829] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 759.396950] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 759.397129] env[61594]: DEBUG nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 759.397310] env[61594]: DEBUG 
nova.virt.hardware [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 759.398471] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4734006b-0501-488e-80fe-b792684d5fb6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.410677] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71bf6d6-f30a-49ab-8dba-833ff6b196d9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.424900] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.430817] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Creating folder: Project (fae9b11612ad41c39f02b336b91bfb37). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.434032] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-579c7fd9-6fdb-4e59-af1c-48f26f1bdfd0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.445527] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Created folder: Project (fae9b11612ad41c39f02b336b91bfb37) in parent group-v277030. [ 759.445673] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Creating folder: Instances. Parent ref: group-v277037. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 759.445916] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d010d9fc-8fcd-411e-b984-67572df0739a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.457274] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.459453] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Created folder: Instances in parent group-v277037. 
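Note: the nova.virt.hardware DEBUG lines above walk the CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor/image preferences and per-dimension limits of 65536, the only possible topology is 1 socket, 1 core, 1 thread. A simplified stand-in for that enumeration (not Nova's actual _get_possible_cpu_topologies, which also handles preferred values and sorting; this only reproduces the arithmetic the log describes):

    # Simplified sketch of the topology enumeration described by the DEBUG lines above.
    def possible_cpu_topologies(vcpus, max_sockets, max_cores, max_threads):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    # With the values from the log (1 vCPU, limits 65536:65536:65536) the only
    # candidate is (1, 1, 1), matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(possible_cpu_topologies(1, 65536, 65536, 65536))  # [(1, 1, 1)]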
[ 759.459674] env[61594]: DEBUG oslo.service.loopingcall [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.459858] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 759.460378] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2847faa2-83b1-4912-8212-95844154263b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.477267] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.477267] env[61594]: value = "task-1291388" [ 759.477267] env[61594]: _type = "Task" [ 759.477267] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.487463] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291388, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.560653] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfac4306-55ce-43c0-8e7e-be7309ffc0e5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.572754] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b995de-7c87-41ef-9db1-ba675b86164c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.610235] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91176195-1c45-44fc-8a80-8fd5417187f4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.617924] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d319a9a5-c113-4a36-8e75-b7bb86b3777c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.635586] env[61594]: DEBUG nova.compute.provider_tree [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 759.636149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 197c8cc1577c401eb2468104014868e7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.649931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 197c8cc1577c401eb2468104014868e7 [ 759.651092] env[61594]: DEBUG nova.scheduler.client.report [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 759.658743] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 5e40afab103a461192e474539dbb7045 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.677067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e40afab103a461192e474539dbb7045 [ 759.677960] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.370s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.678646] env[61594]: ERROR nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. 
[ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] Traceback (most recent call last): [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.driver.spawn(context, instance, image_meta, [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] vm_ref = self.build_virtual_machine(instance, [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] vif_infos = vmwarevif.get_vif_info(self._session, [ 759.678646] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] for vif in network_info: [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self._sync_wrapper(fn, *args, **kwargs) [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.wait() [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self[:] = self._gt.wait() [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self._exit_event.wait() [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] result = hub.switch() [ 759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
759.679788] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return self.greenlet.switch() [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] result = function(*args, **kwargs) [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] return func(*args, **kwargs) [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise e [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] nwinfo = self.network_api.allocate_for_instance( [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] created_port_ids = self._update_ports_for_instance( [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] with excutils.save_and_reraise_exception(): [ 759.680326] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] self.force_reraise() [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise self.value [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] updated_port = self._update_port( [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] _ensure_no_port_binding_failure(port) [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] raise exception.PortBindingFailed(port_id=port['id']) [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] nova.exception.PortBindingFailed: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. [ 759.680640] env[61594]: ERROR nova.compute.manager [instance: 681fb733-cd37-4f73-a487-e4856206907f] [ 759.680944] env[61594]: DEBUG nova.compute.utils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 759.682090] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Build of instance 681fb733-cd37-4f73-a487-e4856206907f was re-scheduled: Binding failed for port cf2a5b97-befe-4276-9dc3-bea4d29d0e6a, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 759.682358] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 759.683361] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquiring lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 759.683361] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Acquired lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.683361] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 759.683508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 420dcdfbdd4442e291fa82d7ba2d5989 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.693158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 420dcdfbdd4442e291fa82d7ba2d5989 [ 759.753424] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a 
tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.954954] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.955511] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 0f2b90e686c04bb78b088fd7eecbb49d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 759.965815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f2b90e686c04bb78b088fd7eecbb49d [ 759.966435] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Releasing lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.966821] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 759.967112] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 759.967558] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a08f661-c59b-4fc6-84ad-5d1aaff882bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.978115] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d1368e-eb20-463c-832d-a8b66fd5867c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.005177] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d16dff71-2dab-469a-8cb1-40ed086c42d1 could not be found. 
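The "refresh_cache-<uuid>" and "compute_resources" acquire/release pairs above come from oslo.concurrency's lockutils: the "inner ... acquired/released ... held N s" lines are emitted by the synchronized decorator, the bare "Acquiring/Acquired/Releasing lock" lines by the lock() context manager. A hedged sketch of both patterns (lock names and function bodies are placeholders, not Nova code):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def abort_instance_claim(instance_uuid):
        pass  # give back the resources claimed for the failed build

    def refresh_instance_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the network info cache under the lock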
[ 760.005417] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 760.005604] env[61594]: INFO nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 760.005848] env[61594]: DEBUG oslo.service.loopingcall [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.008923] env[61594]: DEBUG nova.compute.manager [-] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 760.009043] env[61594]: DEBUG nova.network.neutron [-] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 760.010689] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291388, 'name': CreateVM_Task, 'duration_secs': 0.287164} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.010846] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 760.011525] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.011880] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.012058] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 760.012229] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6dbc159-dcfd-4b1f-b5f8-ce0da285fde5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
760.016806] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for the task: (returnval){ [ 760.016806] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52d1eaee-1631-65c4-b448-ca40ab8718df" [ 760.016806] env[61594]: _type = "Task" [ 760.016806] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.026525] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52d1eaee-1631-65c4-b448-ca40ab8718df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.081622] env[61594]: DEBUG nova.network.neutron [-] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.082164] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg db50cb59da944f46ae2da9d297193be1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.091034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db50cb59da944f46ae2da9d297193be1 [ 760.091390] env[61594]: DEBUG nova.network.neutron [-] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.091783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f3d8e34c22db4616a49238ee58649c1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.099809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3d8e34c22db4616a49238ee58649c1a [ 760.101050] env[61594]: INFO nova.compute.manager [-] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Took 0.09 seconds to deallocate network for instance. 
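The CreateVM_Task and SearchDatastore_Task waits above follow oslo.vmware's task-polling pattern (wait_for_task / _poll_task in oslo_vmware/api.py). A minimal sketch of that pattern, with placeholder endpoint, credentials, and poll settings and the SOAP method left generic:

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials; retry count and poll
    # interval are illustrative, not the values used by this deployment.
    session = api.VMwareAPISession(
        'vc.example.test', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def run_task(managed_object, method, **kwargs):
        # A *_Task SOAP method returns a Task moref; wait_for_task() polls
        # it (the "progress is N%" / "completed successfully" lines above)
        # and returns the task result, or raises if the task errored.
        task_ref = session.invoke_api(session.vim, method,
                                      managed_object, **kwargs)
        return session.wait_for_task(task_ref)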
[ 760.102353] env[61594]: DEBUG nova.compute.claims [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.102550] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.102793] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.104893] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 30ff9679930a482d9d3670929981ac81 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.145589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30ff9679930a482d9d3670929981ac81 [ 760.173600] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.174209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8b22b5610f80479fbd38a05d250ea193 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.198460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b22b5610f80479fbd38a05d250ea193 [ 760.199450] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.199734] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 760.200481] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 760.200551] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-080a9891-25f6-4e0e-973f-b3dbbc75e4f3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.214968] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0c3f4c-db3b-4c5e-b686-dd92eec51643 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.231305] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.231305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg b42a2d4d00bf4c4ca413c1eaa4828ab7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.244946] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 45a40160-c224-4a8f-8e92-26d770d4ff4f could not be found. [ 760.244946] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 760.244946] env[61594]: INFO nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 760.245199] env[61594]: DEBUG oslo.service.loopingcall [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 760.245847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b42a2d4d00bf4c4ca413c1eaa4828ab7 [ 760.246124] env[61594]: DEBUG nova.compute.manager [-] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 760.246233] env[61594]: DEBUG nova.network.neutron [-] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 760.248033] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Releasing lock "refresh_cache-681fb733-cd37-4f73-a487-e4856206907f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.250137] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 760.250137] env[61594]: DEBUG nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 760.250137] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 760.289123] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.289601] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 4c159d28e1de4253bd17914643067a14 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.304036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c159d28e1de4253bd17914643067a14 [ 760.304992] env[61594]: DEBUG nova.network.neutron [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.305989] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg e790c20f397e483699c1c88a71af97e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.310945] env[61594]: DEBUG nova.network.neutron [-] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.311901] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b60a108a65ca4667927c6917f8d8b134 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.319276] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b60a108a65ca4667927c6917f8d8b134 [ 760.319276] env[61594]: DEBUG nova.network.neutron [-] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.322361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 71b6a1992af24322ae8590bb42aa84ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.324135] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e790c20f397e483699c1c88a71af97e1 [ 760.324135] env[61594]: INFO nova.compute.manager [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] [instance: 681fb733-cd37-4f73-a487-e4856206907f] Took 0.08 seconds to deallocate network for instance. [ 760.327190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 729647253fc143318906fadd29e59af1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.337797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 71b6a1992af24322ae8590bb42aa84ce [ 760.338375] env[61594]: INFO nova.compute.manager [-] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Took 0.09 seconds to deallocate network for instance. 
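The destroy paths above (for both d16dff71 and 45a40160) hit the same benign case: the VM was never created on the backend, so the lookup raises InstanceNotFound, the driver logs the "does not exist on backend" warning, treats the instance as destroyed, and Nova proceeds to deallocate the (empty) network. A hedged, standalone sketch of that flow — the lookup/delete callables are placeholders, not the vmwareapi driver's methods:

    class InstanceNotFound(Exception):
        pass

    def destroy(instance_uuid, lookup_vm, delete_vm):
        # lookup_vm corresponds to the SearchIndex.FindAllByUuid call in
        # the log; delete_vm to the actual unregister/delete of the VM.
        try:
            vm_ref = lookup_vm(instance_uuid)
            delete_vm(vm_ref)
        except InstanceNotFound:
            # Already gone: log-and-continue instead of failing teardown.
            pass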
[ 760.340634] env[61594]: DEBUG nova.compute.claims [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.341240] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.346619] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1577362-a729-4917-8ea0-dfd10e7059f8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.355609] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc765b6-6766-485f-86e6-3c808f237134 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.394901] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 729647253fc143318906fadd29e59af1 [ 760.398330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 319dc2baabad4473a32dae4822b0a88e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.403798] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e98e75b-68a2-4d0a-8859-b3a893e22742 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.409866] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db3c3d8-6e9b-4eac-9e54-4892e96e1689 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.427353] env[61594]: DEBUG nova.compute.provider_tree [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.427535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 9bf1718ab64440d0adf150fd639b3d05 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.439916] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 319dc2baabad4473a32dae4822b0a88e [ 760.443446] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bf1718ab64440d0adf150fd639b3d05 [ 760.444801] env[61594]: DEBUG nova.scheduler.client.report [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 760.447114] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg b6fcdec4ddf7492f95f190c0864e5741 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.463391] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6fcdec4ddf7492f95f190c0864e5741 [ 760.464246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.465033] env[61594]: ERROR nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. 
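The inventory payload above is what the resource tracker reports to placement for this provider; the schedulable capacity it implies follows the usual placement formula, usable = (total - reserved) * allocation_ratio (formula taken from placement's documented behaviour, not from this log). A quick check against the logged numbers:

    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, usable)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0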
[ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Traceback (most recent call last): [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.driver.spawn(context, instance, image_meta, [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] vm_ref = self.build_virtual_machine(instance, [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 760.465033] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] for vif in network_info: [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self._sync_wrapper(fn, *args, **kwargs) [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.wait() [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self[:] = self._gt.wait() [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self._exit_event.wait() [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] result = hub.switch() [ 760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
760.465409] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return self.greenlet.switch() [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] result = function(*args, **kwargs) [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] return func(*args, **kwargs) [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise e [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] nwinfo = self.network_api.allocate_for_instance( [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] created_port_ids = self._update_ports_for_instance( [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] with excutils.save_and_reraise_exception(): [ 760.465756] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] self.force_reraise() [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise self.value [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] updated_port = self._update_port( [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] _ensure_no_port_binding_failure(port) [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] raise exception.PortBindingFailed(port_id=port['id']) [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] nova.exception.PortBindingFailed: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. [ 760.466110] env[61594]: ERROR nova.compute.manager [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] [ 760.466421] env[61594]: DEBUG nova.compute.utils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 760.466773] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.126s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 760.468874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 3ed8d20874b84e95ba2d7fe18b4fa367 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.470653] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Build of instance d16dff71-2dab-469a-8cb1-40ed086c42d1 was re-scheduled: Binding failed for port 8dfc2970-8f62-41a9-be95-d63bee3b3b33, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 760.471339] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 760.471339] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquiring lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.471465] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Acquired lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.471629] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 760.472007] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 96e6b6c1e0644ddb9b936a43abc511c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.476147] env[61594]: INFO nova.scheduler.client.report [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Deleted allocations for instance 681fb733-cd37-4f73-a487-e4856206907f [ 760.480456] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Successfully created port: 65cc7798-0ef5-4fd5-8396-eed3859fc1af {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.482818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Expecting reply to msg 9f38b496f0044c30bdd0a104cef26920 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.483773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96e6b6c1e0644ddb9b936a43abc511c3 [ 760.500153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f38b496f0044c30bdd0a104cef26920 [ 760.500153] env[61594]: DEBUG oslo_concurrency.lockutils [None req-66694e96-b294-4468-97c5-98a281230e3a tempest-ImagesOneServerTestJSON-327664204 tempest-ImagesOneServerTestJSON-327664204-project-member] Lock "681fb733-cd37-4f73-a487-e4856206907f" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.561s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.530282] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 760.531902] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.531902] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.531902] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.534075] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.535492] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f449afbc-eecb-4080-bc6f-41a5acc33e65 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.538907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ed8d20874b84e95ba2d7fe18b4fa367 [ 760.549928] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 760.553529] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.553712] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 760.554472] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a7b5bed-7828-4e63-bf92-4408bd331c23 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.559730] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for the task: (returnval){ [ 760.559730] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52f683b0-8416-57b2-77bb-1548cc9fa4a9" [ 760.559730] env[61594]: _type = "Task" [ 760.559730] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.571923] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52f683b0-8416-57b2-77bb-1548cc9fa4a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.708128] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d85b67-2fd8-4436-a6a2-d6a3316dcf7d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.717605] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de52dc17-1bc0-4a22-b667-88e7d06e84b6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.752837] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01145bf1-f1b2-41f6-99e1-e774440f9511 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.757462] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Successfully created port: 68b29d7a-88f7-4130-a084-890a355e3dbb {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.763182] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe736c-4f56-4dbf-a36d-cc56e7686225 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.780652] env[61594]: DEBUG nova.compute.provider_tree [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.780652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 06c90dde3f5b48edaf467107a988b7f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.790544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06c90dde3f5b48edaf467107a988b7f5 [ 760.791050] env[61594]: DEBUG nova.scheduler.client.report [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 760.793428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 3db33b22a3b246158a0bb83139cc9d47 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.808633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 3db33b22a3b246158a0bb83139cc9d47 [ 760.809706] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.343s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.810308] env[61594]: ERROR nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Traceback (most recent call last): [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.driver.spawn(context, instance, image_meta, [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] vm_ref = self.build_virtual_machine(instance, [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] vif_infos = vmwarevif.get_vif_info(self._session, [ 760.810308] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] for vif in network_info: [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self._sync_wrapper(fn, *args, **kwargs) [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.wait() [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 
45a40160-c224-4a8f-8e92-26d770d4ff4f] self[:] = self._gt.wait() [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self._exit_event.wait() [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] result = hub.switch() [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 760.810630] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return self.greenlet.switch() [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] result = function(*args, **kwargs) [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] return func(*args, **kwargs) [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise e [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] nwinfo = self.network_api.allocate_for_instance( [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] created_port_ids = self._update_ports_for_instance( [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] with excutils.save_and_reraise_exception(): [ 760.810965] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] self.force_reraise() [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 
760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise self.value [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] updated_port = self._update_port( [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] _ensure_no_port_binding_failure(port) [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] raise exception.PortBindingFailed(port_id=port['id']) [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] nova.exception.PortBindingFailed: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. [ 760.811370] env[61594]: ERROR nova.compute.manager [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] [ 760.811695] env[61594]: DEBUG nova.compute.utils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 760.813074] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Build of instance 45a40160-c224-4a8f-8e92-26d770d4ff4f was re-scheduled: Binding failed for port e4f28593-3f76-493e-acd6-31f97463f969, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 760.813525] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 760.815356] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 760.815356] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.815356] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 760.815356] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 683eb261eb574ff8b0ae4374cec0eb8c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 760.829277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 683eb261eb574ff8b0ae4374cec0eb8c [ 761.073782] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 761.073983] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Creating directory with path [datastore1] vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 761.074512] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f838a5a-e27b-47a6-ac77-157ae7ccc679 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.080013] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 761.095465] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Created directory with path [datastore1] vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 761.095760] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Fetch image to [datastore1] vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 761.095946] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 761.096960] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38c1990-e540-4093-b292-a4fd5d89abfa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.107062] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1738d935-c44e-491d-abd2-13291e439b25 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.116759] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f3e73d4-c4da-4ba5-92d5-7080972dd9c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.152094] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32928716-85f4-4f7d-a274-e9a11259c933 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.159066] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4b8d3856-2131-42c1-b5a2-28eda60f3522 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.186869] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.187361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 
18d8453a708445849286e9a0e2e1225f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.190510] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 761.204728] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18d8453a708445849286e9a0e2e1225f [ 761.205589] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Releasing lock "refresh_cache-d16dff71-2dab-469a-8cb1-40ed086c42d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.205589] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 761.206275] env[61594]: DEBUG nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 761.206275] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 761.271170] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 761.271170] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 1b4f75fb19804f488ba3e7d5120c6e4a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.273462] env[61594]: DEBUG oslo_vmware.rw_handles [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 761.332463] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b4f75fb19804f488ba3e7d5120c6e4a [ 761.335220] env[61594]: DEBUG nova.network.neutron [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.335707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 6016aea3c9624aaba611ead7f07b94f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.339389] env[61594]: DEBUG oslo_vmware.rw_handles [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 761.339573] env[61594]: DEBUG oslo_vmware.rw_handles [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 761.345324] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6016aea3c9624aaba611ead7f07b94f3 [ 761.346019] env[61594]: INFO nova.compute.manager [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] [instance: d16dff71-2dab-469a-8cb1-40ed086c42d1] Took 0.14 seconds to deallocate network for instance. 
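The PortBindingFailed traceback earlier in this run ends in nova/network/neutron.py's _ensure_no_port_binding_failure(), which converts a failed Neutron binding into an exception that aborts the build and leads to the reschedule / "deallocate network" entries seen here. Below is a minimal sketch of that style of check, not the Nova source itself; the 'binding:vif_type' field and the 'binding_failed' marker value are assumptions based on the Neutron port API.

    class PortBindingFailed(Exception):
        """Raised when Neutron reports that a port could not be bound to a host."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")
            self.port_id = port_id


    def ensure_no_port_binding_failure(port):
        # Neutron marks an unbindable port by setting binding:vif_type to
        # 'binding_failed' (assumed field/value, per the Neutron port API).
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    # Hypothetical usage with a port Neutron failed to bind:
    # ensure_no_port_binding_failure(
    #     {'id': 'e4f28593-3f76-493e-acd6-31f97463f969',
    #      'binding:vif_type': 'binding_failed'})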
[ 761.347946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 7f8d32b4103b42eb89d33f667262da5d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.393376] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "791543a0-ad32-4b51-9d21-b5cc72e480ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.393614] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "791543a0-ad32-4b51-9d21-b5cc72e480ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.394095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 3791657e55ed4ae8a9595fbc5dc67057 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.399107] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f8d32b4103b42eb89d33f667262da5d [ 761.401777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg 1429ac22aaaa46868a5bb65b46f016d4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.404169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3791657e55ed4ae8a9595fbc5dc67057 [ 761.404585] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 761.406210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg b36b0af039a646ed8a80525bd34cee30 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.440895] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1429ac22aaaa46868a5bb65b46f016d4 [ 761.463666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b36b0af039a646ed8a80525bd34cee30 [ 761.471813] env[61594]: INFO nova.scheduler.client.report [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Deleted allocations for instance d16dff71-2dab-469a-8cb1-40ed086c42d1 [ 761.478687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Expecting reply to msg b0fa8a6719f74d829cf5be8aacfae77b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.489521] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.490172] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.493284] env[61594]: INFO nova.compute.claims [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.494930] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 64e9839564c64f49951e384366cc9b5a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.497198] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0fa8a6719f74d829cf5be8aacfae77b [ 761.499591] env[61594]: DEBUG oslo_concurrency.lockutils [None req-33825971-5a59-4099-9f6b-088bd9c9c766 tempest-ServerAddressesNegativeTestJSON-1256841183 tempest-ServerAddressesNegativeTestJSON-1256841183-project-member] Lock "d16dff71-2dab-469a-8cb1-40ed086c42d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.193s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.543656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 64e9839564c64f49951e384366cc9b5a [ 761.545490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 0e143fc46ba142958da6d7912cf5d827 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.554368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e143fc46ba142958da6d7912cf5d827 [ 761.616409] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.617063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a2793d79bbf844b599aaf8c2175b2dd2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.638292] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2793d79bbf844b599aaf8c2175b2dd2 [ 761.638952] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-45a40160-c224-4a8f-8e92-26d770d4ff4f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 761.639183] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 761.639368] env[61594]: DEBUG nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 761.639532] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 761.688264] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 761.688876] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2c31ae397c05436faebc688087ff6a3e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.696853] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba39e38-572c-4619-a235-156d391e8ed3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.702595] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77205e40-4330-4af1-80ed-3a2080b0485d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.706595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c31ae397c05436faebc688087ff6a3e [ 761.707198] env[61594]: DEBUG nova.network.neutron [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.707790] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 699df81410274561a592d461cd3f66e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.739229] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 699df81410274561a592d461cd3f66e6 [ 761.740186] env[61594]: INFO nova.compute.manager [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 45a40160-c224-4a8f-8e92-26d770d4ff4f] Took 0.10 seconds to deallocate network for instance. 
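The inventory payload reported to Placement in the entries above determines the capacity against which the claim on provider f0ff3a26-85e8-47dd-b241-86a582e8d4be is made: each resource class carries total, reserved and allocation_ratio, and usable capacity works out as (total - reserved) * allocation_ratio, i.e. 192 VCPU, 196078 MB of RAM and 400 GB of disk for this node. The short sketch below simply recomputes those figures from the same dictionary; the helper name is hypothetical.

    # Recompute usable capacity from the inventory dict logged by
    # nova.scheduler.client.report above, using the standard Placement
    # formula (total - reserved) * allocation_ratio.
    INVENTORY = {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139,
                    'step_size': 1, 'allocation_ratio': 1.0},
    }

    def usable_capacity(inventory):
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    print(usable_capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}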
[ 761.742474] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 3d5d9c0991b5455897e5bb7f33c6670c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.747017] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e863317-6e13-41ff-b4e2-bb52a8d50c55 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.752422] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-481c453c-ce08-4e09-bb29-f5ddb633ce6d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.768015] env[61594]: DEBUG nova.compute.provider_tree [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 761.768015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg e4d9732a9cff45eebc6bf7bbf288f641 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.785191] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4d9732a9cff45eebc6bf7bbf288f641 [ 761.785725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d5d9c0991b5455897e5bb7f33c6670c [ 761.787777] env[61594]: DEBUG nova.scheduler.client.report [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 761.790606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 55398a67b8b54ff4934070c7a255e1e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.798734] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 59aefd12dcdb493eb5c2c8e5cddd0eaf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.810272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55398a67b8b54ff4934070c7a255e1e4 [ 761.810272] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 
tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.810272] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 761.811687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg a58f161a6d164e8f82f06e9f141e0050 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.843034] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59aefd12dcdb493eb5c2c8e5cddd0eaf [ 761.846378] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a58f161a6d164e8f82f06e9f141e0050 [ 761.847893] env[61594]: DEBUG nova.compute.utils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.848538] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 95d40e68c65d450983cdb05fb5596ff1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.849882] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 761.850121] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 761.859166] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95d40e68c65d450983cdb05fb5596ff1 [ 761.859588] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 761.861305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg af5596375ce3430bb94c53c4f954de21 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.864122] env[61594]: INFO nova.scheduler.client.report [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance 45a40160-c224-4a8f-8e92-26d770d4ff4f [ 761.872430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c672967b8fe54945863ad7ad66a97a6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.897445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c672967b8fe54945863ad7ad66a97a6e [ 761.897445] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c4759a7c-e8c1-4013-b1a8-5676310751b1 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "45a40160-c224-4a8f-8e92-26d770d4ff4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.617s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.904769] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af5596375ce3430bb94c53c4f954de21 [ 761.907677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg db8e6950acd44a998882925c4ff050ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 761.939283] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db8e6950acd44a998882925c4ff050ff [ 761.940485] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 761.967689] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.967948] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.968211] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.968432] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.968584] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.968733] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.968939] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.969278] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 761.969480] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.969651] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.969827] env[61594]: DEBUG nova.virt.hardware [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.970708] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dadc4218-b60d-435d-99bc-af7cae91f47b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.979133] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130216cb-50f6-46f7-a62e-b0f2efe5c931 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.257043] env[61594]: DEBUG nova.policy [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77e9c81e9eb4097bd16d9e763423d3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0bb8755136749c6a5166a19e20059df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 762.654104] env[61594]: ERROR nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. 
[ 762.654104] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 762.654104] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 762.654104] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 762.654104] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.654104] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.654104] env[61594]: ERROR nova.compute.manager raise self.value [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 762.654104] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 762.654104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.654104] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 762.655849] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.655849] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 762.655849] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. 
[ 762.655849] env[61594]: ERROR nova.compute.manager [ 762.655849] env[61594]: Traceback (most recent call last): [ 762.655849] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 762.655849] env[61594]: listener.cb(fileno) [ 762.655849] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 762.655849] env[61594]: result = function(*args, **kwargs) [ 762.655849] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 762.655849] env[61594]: return func(*args, **kwargs) [ 762.655849] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 762.655849] env[61594]: raise e [ 762.655849] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 762.655849] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 762.655849] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 762.655849] env[61594]: created_port_ids = self._update_ports_for_instance( [ 762.655849] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 762.655849] env[61594]: with excutils.save_and_reraise_exception(): [ 762.655849] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.655849] env[61594]: self.force_reraise() [ 762.655849] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.655849] env[61594]: raise self.value [ 762.655849] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 762.655849] env[61594]: updated_port = self._update_port( [ 762.655849] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.655849] env[61594]: _ensure_no_port_binding_failure(port) [ 762.655849] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.655849] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 762.657131] env[61594]: nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. [ 762.657131] env[61594]: Removing descriptor: 19 [ 762.657131] env[61594]: ERROR nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. 
[ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Traceback (most recent call last): [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] yield resources [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.driver.spawn(context, instance, image_meta, [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 762.657131] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] vm_ref = self.build_virtual_machine(instance, [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] for vif in network_info: [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self._sync_wrapper(fn, *args, **kwargs) [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.wait() [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self[:] = self._gt.wait() [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self._exit_event.wait() [ 762.657631] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 762.658196] env[61594]: ERROR 
nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] result = hub.switch() [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self.greenlet.switch() [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] result = function(*args, **kwargs) [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return func(*args, **kwargs) [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise e [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] nwinfo = self.network_api.allocate_for_instance( [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 762.658196] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] created_port_ids = self._update_ports_for_instance( [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] with excutils.save_and_reraise_exception(): [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.force_reraise() [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise self.value [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] updated_port = self._update_port( [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 762.658721] 
env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] _ensure_no_port_binding_failure(port) [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 762.658721] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise exception.PortBindingFailed(port_id=port['id']) [ 762.659260] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. [ 762.659260] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] [ 762.659260] env[61594]: INFO nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Terminating instance [ 762.663904] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquiring lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 762.664125] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquired lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.664261] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 762.664686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 85f4d538fbeb4b8b963b884b529da2a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 762.676937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85f4d538fbeb4b8b963b884b529da2a7 [ 762.729615] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.137198] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.137778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg ee7351aae5054754bcfab145e5dfed35 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.155714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee7351aae5054754bcfab145e5dfed35 [ 763.156509] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Releasing lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.156914] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 763.157120] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 763.158564] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4716e105-9e82-4ba6-836e-994983958803 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.165225] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.167405] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.168030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg e583cd9044b34f98b76d7101935512bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.178478] 
env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9b9ae2-9adb-4648-ac02-2e2b8159f07e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.192692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e583cd9044b34f98b76d7101935512bd [ 763.193339] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 763.194942] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 3af7415438ea40448c17b246ee808bb2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.208611] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c588118f-5b15-4262-8d21-c4fbc3088f1f could not be found. [ 763.208849] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 763.209048] env[61594]: INFO nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 763.209312] env[61594]: DEBUG oslo.service.loopingcall [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.210870] env[61594]: DEBUG nova.compute.manager [-] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 763.211733] env[61594]: DEBUG nova.network.neutron [-] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 763.228403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3af7415438ea40448c17b246ee808bb2 [ 763.243580] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.243744] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.246264] env[61594]: INFO nova.compute.claims [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.247772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 81ea2742615e4371bbee44d38f59dfd6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.266792] env[61594]: DEBUG nova.network.neutron [-] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.267294] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 76eab5c8a4f247f187d84ce29f9f9da1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.274381] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76eab5c8a4f247f187d84ce29f9f9da1 [ 763.274804] env[61594]: DEBUG nova.network.neutron [-] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.275488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8df62c83bcb54d688ef0fbd291a403b0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.290928] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81ea2742615e4371bbee44d38f59dfd6 [ 763.291680] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8df62c83bcb54d688ef0fbd291a403b0 [ 763.293217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 592b7bd873524ae4bebfb51b131c9213 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.298021] env[61594]: INFO nova.compute.manager [-] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Took 0.08 seconds to deallocate network for instance. [ 763.298021] env[61594]: DEBUG nova.compute.claims [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 763.298021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 763.306899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 592b7bd873524ae4bebfb51b131c9213 [ 763.450782] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab55f75e-81ee-4717-9326-103c9d3d0776 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.460445] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94872a8-bf08-41a1-bff1-836c043c15ee {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.493283] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656f6895-23ae-434d-a874-019cd45500a1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.502077] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfd347e-be96-4599-8a3d-76a0a1226551 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.518471] env[61594]: 
DEBUG nova.compute.provider_tree [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.518986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 39bb54e75c1142129b31033811e512ad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.528951] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39bb54e75c1142129b31033811e512ad [ 763.529956] env[61594]: DEBUG nova.scheduler.client.report [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 763.532313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 2ce554cd9acd4c7789d11773bddbd83b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.548655] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ce554cd9acd4c7789d11773bddbd83b [ 763.549730] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.549958] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 763.551698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 5cfb22e1e4fb4ce49afee8f27a5754ab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.552627] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.255s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 763.554313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 05955a4551f948f09a87d555a53c6197 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.595209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cfb22e1e4fb4ce49afee8f27a5754ab [ 763.596647] env[61594]: DEBUG nova.compute.utils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 763.597255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 746dffb6287d469188b7e8016e5863e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.599192] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Successfully created port: a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.601751] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 763.601751] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 763.603757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05955a4551f948f09a87d555a53c6197 [ 763.608586] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 746dffb6287d469188b7e8016e5863e1 [ 763.609130] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 763.612537] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 7dd0c080e0e44272be7b1b55734c9c54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.647645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7dd0c080e0e44272be7b1b55734c9c54 [ 763.653501] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 893a89a15a4746f6b98c0de058c8184c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.696766] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 893a89a15a4746f6b98c0de058c8184c [ 763.698040] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 763.726834] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:29:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='18ccd0ea-0912-46e4-8374-51cada2c5ae5',id=37,is_public=True,memory_mb=128,name='tempest-test_resize_flavor_-1273252548',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 763.726834] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 763.727025] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.727222] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 763.727332] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.727476] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 763.727676] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 763.727869] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 763.728977] env[61594]: DEBUG 
nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 763.728977] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 763.728977] env[61594]: DEBUG nova.virt.hardware [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.730155] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15cbaab5-28aa-4b77-a1e7-7641da4c6907 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.739590] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a97242c-0e27-4759-ad05-4e9ac0dbde45 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.768443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0b687f-d8a6-44c1-aa4c-b3419ff69291 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.775740] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d0b395-e838-4ad3-a49e-3d1fc4db0086 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.807021] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa38d8b-9348-46f8-920c-a9e865c3c4d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.813583] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d0d6bf-b093-43c7-8737-b725699aac1f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.830022] env[61594]: DEBUG nova.compute.provider_tree [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.830022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 43658701c0f04433b2ed84c154f43bd3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.842487] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43658701c0f04433b2ed84c154f43bd3 [ 763.843986] env[61594]: DEBUG nova.scheduler.client.report [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 
tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 763.846643] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 29e9812f377b4bf0996afb11cb2bd8b0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.868057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29e9812f377b4bf0996afb11cb2bd8b0 [ 763.868963] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.316s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.869523] env[61594]: ERROR nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. 
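Each of these tracebacks passes through oslo_utils.excutils.save_and_reraise_exception before PortBindingFailed reaches the compute manager. The short sketch below shows the usual cleanup-then-reraise pattern with that context manager; it assumes oslo.utils is installed, and update_one_port/cleanup are placeholder names, not Nova code.

    from oslo_utils import excutils

    def update_one_port(port):
        # Placeholder for the Neutron port update; fails for this example.
        raise RuntimeError(f"binding failed for {port}")

    def update_ports(ports, cleanup):
        created = []
        try:
            for port in ports:
                created.append(update_one_port(port))
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs first; the original exception is re-raised
                # automatically when the with block exits.
                cleanup(created)
        return created

The point of the pattern is that partial work can be rolled back without masking the original error, which is why the same PortBindingFailed surfaces again in _build_and_run_instance.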
[ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Traceback (most recent call last): [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.driver.spawn(context, instance, image_meta, [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] vm_ref = self.build_virtual_machine(instance, [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.869523] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] for vif in network_info: [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self._sync_wrapper(fn, *args, **kwargs) [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.wait() [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self[:] = self._gt.wait() [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self._exit_event.wait() [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] result = hub.switch() [ 763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
763.870025] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return self.greenlet.switch() [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] result = function(*args, **kwargs) [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] return func(*args, **kwargs) [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise e [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] nwinfo = self.network_api.allocate_for_instance( [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] created_port_ids = self._update_ports_for_instance( [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] with excutils.save_and_reraise_exception(): [ 763.870696] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] self.force_reraise() [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise self.value [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] updated_port = self._update_port( [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] _ensure_no_port_binding_failure(port) [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] raise exception.PortBindingFailed(port_id=port['id']) [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] nova.exception.PortBindingFailed: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. [ 763.871358] env[61594]: ERROR nova.compute.manager [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] [ 763.871872] env[61594]: DEBUG nova.compute.utils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 763.873596] env[61594]: DEBUG nova.policy [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bfb9e86398ee44b7b9e06dcdc29f8ff6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05666a0abf0b433184c14ff43e6e82c1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 763.876225] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Build of instance c588118f-5b15-4262-8d21-c4fbc3088f1f was re-scheduled: Binding failed for port cfc56a70-5dc7-481d-ab71-f33ec5765463, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 763.876698] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 763.876922] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquiring lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.877084] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Acquired lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.877269] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 763.877726] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 3fab6f9da1244f5399341002ce06c881 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.887583] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3fab6f9da1244f5399341002ce06c881 [ 763.907106] env[61594]: ERROR nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. 
[ 763.907106] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 763.907106] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.907106] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.907106] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.907106] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.907106] env[61594]: ERROR nova.compute.manager raise self.value [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.907106] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 763.907106] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.907106] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 763.907647] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.907647] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 763.907647] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. 
[ 763.907647] env[61594]: ERROR nova.compute.manager [ 763.907844] env[61594]: Traceback (most recent call last): [ 763.907919] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 763.907919] env[61594]: listener.cb(fileno) [ 763.907992] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 763.907992] env[61594]: result = function(*args, **kwargs) [ 763.908671] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.908671] env[61594]: return func(*args, **kwargs) [ 763.908671] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 763.908671] env[61594]: raise e [ 763.908671] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 763.908671] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 763.908671] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.908671] env[61594]: created_port_ids = self._update_ports_for_instance( [ 763.908671] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.908671] env[61594]: with excutils.save_and_reraise_exception(): [ 763.908671] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.908671] env[61594]: self.force_reraise() [ 763.908671] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.908671] env[61594]: raise self.value [ 763.908671] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.908671] env[61594]: updated_port = self._update_port( [ 763.908671] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.908671] env[61594]: _ensure_no_port_binding_failure(port) [ 763.908671] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.908671] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 763.908671] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. [ 763.908671] env[61594]: Removing descriptor: 24 [ 763.909313] env[61594]: ERROR nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. 
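The "Acquiring lock ... / Acquired lock ... / Releasing lock refresh_cache-<uuid>" lines that bracket each teardown come from oslo_concurrency.lockutils. A small sketch of that per-instance locking pattern follows; it assumes oslo.concurrency is installed, and refresh_instance_cache/refresh are illustrative names rather than the actual Nova helpers.

    from oslo_concurrency import lockutils

    def refresh_instance_cache(instance_uuid, refresh):
        # Serialize network-info cache refreshes per instance, mirroring the
        # "refresh_cache-<uuid>" lock names seen in the log.
        with lockutils.lock(f'refresh_cache-{instance_uuid}'):
            return refresh(instance_uuid)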
[ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Traceback (most recent call last): [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] yield resources [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.driver.spawn(context, instance, image_meta, [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self._vmops.spawn(context, instance, image_meta, injected_files, [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] vm_ref = self.build_virtual_machine(instance, [ 763.909313] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] vif_infos = vmwarevif.get_vif_info(self._session, [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] for vif in network_info: [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self._sync_wrapper(fn, *args, **kwargs) [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.wait() [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self[:] = self._gt.wait() [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self._exit_event.wait() [ 763.909649] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 763.909649] env[61594]: ERROR 
nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] result = hub.switch() [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self.greenlet.switch() [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] result = function(*args, **kwargs) [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return func(*args, **kwargs) [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise e [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] nwinfo = self.network_api.allocate_for_instance( [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] created_port_ids = self._update_ports_for_instance( [ 763.909954] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] with excutils.save_and_reraise_exception(): [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.force_reraise() [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise self.value [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] updated_port = self._update_port( [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 763.910297] 
env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] _ensure_no_port_binding_failure(port) [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise exception.PortBindingFailed(port_id=port['id']) [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. [ 763.910297] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] [ 763.910610] env[61594]: INFO nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Terminating instance [ 763.911281] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 763.912626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquired lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.912626] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 763.912626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg cccc00912d5b44e39c897d1db42eb2d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 763.922510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cccc00912d5b44e39c897d1db42eb2d7 [ 763.966508] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.977392] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.383825] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.384967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 8293c7a232a74b8d90cb0399c205c458 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.395664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8293c7a232a74b8d90cb0399c205c458 [ 764.397579] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Releasing lock "refresh_cache-c588118f-5b15-4262-8d21-c4fbc3088f1f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.397902] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 764.401150] env[61594]: DEBUG nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 764.401150] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.489945] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.490630] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg fb2fd855780f46c9b2f2eaa5d1ea4831 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.497225] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.498560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 7c6b87ac28df415f8f572f30d4cf8ce6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.499195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb2fd855780f46c9b2f2eaa5d1ea4831 [ 764.499647] env[61594]: DEBUG nova.network.neutron [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.500133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg dbdfe10858f14fa5820d146746a1a902 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.514757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c6b87ac28df415f8f572f30d4cf8ce6 [ 764.515287] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbdfe10858f14fa5820d146746a1a902 [ 764.516397] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Releasing lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 764.516791] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 764.516986] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 764.517639] env[61594]: INFO nova.compute.manager [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] [instance: c588118f-5b15-4262-8d21-c4fbc3088f1f] Took 0.12 seconds to deallocate network for instance. [ 764.519269] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 8384284d1d6e434aa60444c5666f7a7d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.524017] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80e032b5-1601-4e44-bdfd-149842afeeb9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.531729] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd6c648-e1e6-45ee-af95-aecf7f1e80c9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.558350] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de519927-4dbd-4e6b-ba0a-d02d01950749 could not be found. [ 764.558598] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 764.559181] env[61594]: INFO nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Took 0.04 seconds to destroy the instance on the hypervisor. [ 764.559368] env[61594]: DEBUG oslo.service.loopingcall [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.559639] env[61594]: DEBUG nova.compute.manager [-] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 764.559738] env[61594]: DEBUG nova.network.neutron [-] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 764.571610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8384284d1d6e434aa60444c5666f7a7d [ 764.574563] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg aae48dc0407c4861b00881083c608b7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.613815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aae48dc0407c4861b00881083c608b7e [ 764.621248] env[61594]: DEBUG nova.network.neutron [-] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.621248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f58a799271114d229bc77cb8166387bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.633749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f58a799271114d229bc77cb8166387bb [ 764.635317] env[61594]: DEBUG nova.network.neutron [-] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.635774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2741f487e09a46948e874891520f3c9c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.648339] env[61594]: INFO nova.scheduler.client.report [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Deleted allocations for instance c588118f-5b15-4262-8d21-c4fbc3088f1f [ 764.655850] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2741f487e09a46948e874891520f3c9c [ 764.659921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Expecting reply to msg 6a9d678ef37a46a6881d7b6bb433e7b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.659921] env[61594]: INFO nova.compute.manager [-] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Took 0.10 seconds to deallocate network for instance. 
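The tracebacks above all funnel through the same two pieces of plumbing: a check that turns Neutron's failed port binding into nova.exception.PortBindingFailed, and oslo's save_and_reraise_exception() context manager, which lets the caller clean up ports it has already touched and then re-raise the original error (the force_reraise() / raise self.value frames in the stack). The following is a minimal, self-contained Python sketch of that pattern, not Nova's actual source; the port field checked and the helper names are assumptions for illustration.

from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""

    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def ensure_no_port_binding_failure(port):
    # Neutron flags a failed binding on the port itself; checking
    # binding:vif_type for 'binding_failed' is an assumption made here
    # for illustration.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


def update_ports_for_instance(ports, cleanup_created_ports):
    created_port_ids = []
    for port in ports:
        try:
            ensure_no_port_binding_failure(port)
            created_port_ids.append(port['id'])
        except Exception:
            # Preserve the active exception, run cleanup, then re-raise it --
            # the same __exit__ / force_reraise() / raise self.value sequence
            # visible in the traceback above.
            with excutils.save_and_reraise_exception():
                cleanup_created_ports(created_port_ids)
    return created_port_ids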
[ 764.661124] env[61594]: DEBUG nova.compute.claims [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 764.661306] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.661525] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.663460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg adfb18a618414531a5055d15e47bdd1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.689544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a9d678ef37a46a6881d7b6bb433e7b7 [ 764.689544] env[61594]: DEBUG oslo_concurrency.lockutils [None req-36578c38-88dc-48c7-bda3-cdcf04efea93 tempest-ServerMetadataTestJSON-1448747604 tempest-ServerMetadataTestJSON-1448747604-project-member] Lock "c588118f-5b15-4262-8d21-c4fbc3088f1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.016s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 764.749505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adfb18a618414531a5055d15e47bdd1d [ 764.903612] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fec09bf-ea8f-453d-b566-1c3a49142aaf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.911846] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc86bd3-5223-44f4-b7b8-d1eef5976eb6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.946824] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8569d9-1a96-4df4-8dbe-af929660e71e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.955529] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4da165-f386-4089-8889-0bbae1bbbbe5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.971206] env[61594]: DEBUG nova.compute.provider_tree [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 
tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.971758] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 4f5bd76617da42e187d7830aa64690ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.979880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f5bd76617da42e187d7830aa64690ef [ 764.981500] env[61594]: DEBUG nova.scheduler.client.report [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 764.983270] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 3268fb18ada4444bb04c5d5d00ffeb7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 764.998480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3268fb18ada4444bb04c5d5d00ffeb7f [ 764.999924] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.338s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.003277] env[61594]: ERROR nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. 
[ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Traceback (most recent call last): [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.driver.spawn(context, instance, image_meta, [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] vm_ref = self.build_virtual_machine(instance, [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.003277] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] for vif in network_info: [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self._sync_wrapper(fn, *args, **kwargs) [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.wait() [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self[:] = self._gt.wait() [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self._exit_event.wait() [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] result = hub.switch() [ 765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
765.003839] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return self.greenlet.switch() [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] result = function(*args, **kwargs) [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] return func(*args, **kwargs) [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise e [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] nwinfo = self.network_api.allocate_for_instance( [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] created_port_ids = self._update_ports_for_instance( [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] with excutils.save_and_reraise_exception(): [ 765.004411] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] self.force_reraise() [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise self.value [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] updated_port = self._update_port( [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] _ensure_no_port_binding_failure(port) [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] raise exception.PortBindingFailed(port_id=port['id']) [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] nova.exception.PortBindingFailed: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. [ 765.005000] env[61594]: ERROR nova.compute.manager [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] [ 765.005487] env[61594]: DEBUG nova.compute.utils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 765.005487] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Build of instance de519927-4dbd-4e6b-ba0a-d02d01950749 was re-scheduled: Binding failed for port ff3cb204-5637-44ad-955a-535d5ce72a74, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 765.005918] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 765.006162] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquiring lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.006312] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Acquired lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.006474] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.006881] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 64d7cc9e95ce4c0289e31abf48af9a6f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.022106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d7cc9e95ce4c0289e31abf48af9a6f [ 765.076650] env[61594]: DEBUG nova.network.neutron 
[None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.261379] env[61594]: ERROR nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. [ 765.261379] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 765.261379] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.261379] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.261379] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.261379] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.261379] env[61594]: ERROR nova.compute.manager raise self.value [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.261379] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.261379] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.261379] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.262091] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.262091] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.262091] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. 
[ 765.262091] env[61594]: ERROR nova.compute.manager [ 765.262091] env[61594]: Traceback (most recent call last): [ 765.262091] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.262091] env[61594]: listener.cb(fileno) [ 765.262091] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 765.262091] env[61594]: result = function(*args, **kwargs) [ 765.262091] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.262091] env[61594]: return func(*args, **kwargs) [ 765.262091] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 765.262091] env[61594]: raise e [ 765.262091] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 765.262091] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 765.262091] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.262091] env[61594]: created_port_ids = self._update_ports_for_instance( [ 765.262091] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.262091] env[61594]: with excutils.save_and_reraise_exception(): [ 765.262091] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.262091] env[61594]: self.force_reraise() [ 765.262091] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.262091] env[61594]: raise self.value [ 765.262091] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.262091] env[61594]: updated_port = self._update_port( [ 765.262091] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.262091] env[61594]: _ensure_no_port_binding_failure(port) [ 765.262091] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.262091] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.262836] env[61594]: nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. [ 765.262836] env[61594]: Removing descriptor: 22 [ 765.263035] env[61594]: ERROR nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. 
[ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Traceback (most recent call last): [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] yield resources [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.driver.spawn(context, instance, image_meta, [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] vm_ref = self.build_virtual_machine(instance, [ 765.263035] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] for vif in network_info: [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self._sync_wrapper(fn, *args, **kwargs) [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.wait() [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self[:] = self._gt.wait() [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self._exit_event.wait() [ 765.263324] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.263324] env[61594]: ERROR 
nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] result = hub.switch() [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self.greenlet.switch() [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] result = function(*args, **kwargs) [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return func(*args, **kwargs) [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise e [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] nwinfo = self.network_api.allocate_for_instance( [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] created_port_ids = self._update_ports_for_instance( [ 765.263629] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] with excutils.save_and_reraise_exception(): [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.force_reraise() [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise self.value [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] updated_port = self._update_port( [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.263958] 
env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] _ensure_no_port_binding_failure(port) [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise exception.PortBindingFailed(port_id=port['id']) [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. [ 765.263958] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] [ 765.264313] env[61594]: INFO nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Terminating instance [ 765.266579] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.266959] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.267267] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.268530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg c9fdc50db4c545ddb09ee55197de30d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.279338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c9fdc50db4c545ddb09ee55197de30d3 [ 765.338227] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.414248] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Successfully created port: 7e591090-397c-446d-bc6c-60ba035cbddd {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.479925] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.480537] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg b6ba71ffc1464bef9901aad18873eb64 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.492709] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6ba71ffc1464bef9901aad18873eb64 [ 765.493389] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Releasing lock "refresh_cache-de519927-4dbd-4e6b-ba0a-d02d01950749" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.493603] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 765.493777] env[61594]: DEBUG nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 765.494250] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 765.546983] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.547476] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg de74c522970441b296051e9c1b16d18f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.555818] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de74c522970441b296051e9c1b16d18f [ 765.556577] env[61594]: DEBUG nova.network.neutron [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.558099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg be19284089014c2a812f34221df2fcfc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.571024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be19284089014c2a812f34221df2fcfc [ 765.571024] env[61594]: INFO nova.compute.manager [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] [instance: de519927-4dbd-4e6b-ba0a-d02d01950749] Took 0.07 seconds to deallocate network for instance. [ 765.571024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 242fccb01a054b15a0fb022ed57b21ba in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.629044] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 242fccb01a054b15a0fb022ed57b21ba [ 765.629044] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 242bf372903d4a34baa17cde4ced1d02 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.668920] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 242bf372903d4a34baa17cde4ced1d02 [ 765.693256] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.693256] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 65298ebd1047485292cf2b95327a989f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.707860] env[61594]: INFO nova.scheduler.client.report [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 
tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Deleted allocations for instance de519927-4dbd-4e6b-ba0a-d02d01950749 [ 765.716643] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65298ebd1047485292cf2b95327a989f [ 765.719096] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 765.719096] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 765.719096] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 765.719096] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Expecting reply to msg 33482ea7fe4c4751b3780a5a05925faa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.719719] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-37826fc6-9218-4359-a321-947388801e48 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.733948] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8eba67e-4501-4f93-9d69-0699adffe2a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.746649] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33482ea7fe4c4751b3780a5a05925faa [ 765.747416] env[61594]: DEBUG oslo_concurrency.lockutils [None req-59db6685-0dbe-4790-98e8-2893a7583856 tempest-AttachVolumeShelveTestJSON-2013570722 tempest-AttachVolumeShelveTestJSON-2013570722-project-member] Lock "de519927-4dbd-4e6b-ba0a-d02d01950749" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.641s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.760367] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d6ead4a-228d-48da-89b3-4fa99f6299d2 could not be found. 
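The "Acquiring lock" / "acquired ... waited" / "released ... held" lines that bracket the refresh_cache-<uuid> and compute_resources sections above come from oslo.concurrency's lockutils, which times how long each caller waited for and held a named in-process lock. A minimal sketch of that pattern, with illustrative function names rather than Nova's own, is:

from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def abort_instance_claim(instance_uuid):
    # Runs with the in-process 'compute_resources' lock held; lockutils emits
    # the "acquired :: waited" and "released :: held" debug lines seen above.
    print('aborting claim for %s' % instance_uuid)


def refresh_network_cache(instance_uuid):
    # The refresh_cache-<uuid> lock can also be taken explicitly as a
    # context manager around the cache rebuild.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        pass  # rebuild and store the instance network info cache here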
[ 765.760634] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 765.760866] env[61594]: INFO nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 765.761057] env[61594]: DEBUG oslo.service.loopingcall [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 765.761529] env[61594]: DEBUG nova.compute.manager [-] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 765.761529] env[61594]: DEBUG nova.network.neutron [-] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 765.807774] env[61594]: DEBUG nova.network.neutron [-] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.808470] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d99d57cace88475fbd4fd2d9abe877c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.815523] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d99d57cace88475fbd4fd2d9abe877c5 [ 765.816022] env[61594]: DEBUG nova.network.neutron [-] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.816494] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9ca664e7458a4f8d887c6ec9f9532024 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.831319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ca664e7458a4f8d887c6ec9f9532024 [ 765.831836] env[61594]: INFO nova.compute.manager [-] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Took 0.07 seconds to deallocate network for instance. 
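Editor's note: the oslo.service entry above ("Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return", logged from oslo_service/loopingcall.py) is the DEBUG line emitted by oslo.service's RetryDecorator wrapper, which drives a function through a looping call and blocks until it returns. The snippet below is a minimal sketch of that pattern only, not Nova's implementation; the retry limits, the stand-in exception, and the cleanup body are assumptions for illustration.

```python
# Minimal sketch (not Nova's code): retrying a cleanup step with
# oslo.service's RetryDecorator, the pattern behind the
# "Waiting for function ... to return." DEBUG entry above.
from oslo_service import loopingcall


class TransientDeallocateError(Exception):
    """Hypothetical transient failure raised by the cleanup step."""


@loopingcall.RetryDecorator(max_retry_count=3,      # assumed values, not Nova's
                            inc_sleep_time=1,
                            max_sleep_time=10,
                            exceptions=(TransientDeallocateError,))
def _deallocate_network_with_retries():
    # Placeholder for the real per-instance Neutron cleanup; it simply
    # succeeds here so the example runs standalone.
    print("deallocating ports for instance")


if __name__ == "__main__":
    # The decorator runs the wrapped function inside a looping call and
    # blocks the caller until it returns or the retry budget is spent.
    _deallocate_network_with_retries()
```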
[ 765.834934] env[61594]: DEBUG nova.compute.claims [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 765.836297] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 765.837452] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.002s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.841164] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg faed876144c145a69d3036dc0a51edd2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 765.896796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg faed876144c145a69d3036dc0a51edd2 [ 766.010570] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2aa2f9-4acc-4733-9720-3e4d75063f30 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.022836] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8559ee-5177-403b-a780-3319a67e9aba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.061114] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84698666-f880-4f52-9e8f-4d847f83bfcf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.068959] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e046bd-5fa3-4ea8-aa2c-fec29735d6a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.084627] env[61594]: DEBUG nova.compute.provider_tree [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.085206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg e9dcee360b3849c89a059384d83055bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.095038] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
e9dcee360b3849c89a059384d83055bc [ 766.097362] env[61594]: DEBUG nova.scheduler.client.report [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 766.099319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 7222599f03494db4ab3a839dd363d9ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.114288] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7222599f03494db4ab3a839dd363d9ec [ 766.114993] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.278s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 766.115926] env[61594]: ERROR nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. 
[ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Traceback (most recent call last): [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.driver.spawn(context, instance, image_meta, [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] vm_ref = self.build_virtual_machine(instance, [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.115926] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] for vif in network_info: [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self._sync_wrapper(fn, *args, **kwargs) [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.wait() [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self[:] = self._gt.wait() [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self._exit_event.wait() [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] result = hub.switch() [ 766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
766.116581] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return self.greenlet.switch() [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] result = function(*args, **kwargs) [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] return func(*args, **kwargs) [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise e [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] nwinfo = self.network_api.allocate_for_instance( [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] created_port_ids = self._update_ports_for_instance( [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] with excutils.save_and_reraise_exception(): [ 766.116968] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] self.force_reraise() [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise self.value [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] updated_port = self._update_port( [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] _ensure_no_port_binding_failure(port) [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] raise exception.PortBindingFailed(port_id=port['id']) [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] nova.exception.PortBindingFailed: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. [ 766.117539] env[61594]: ERROR nova.compute.manager [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] [ 766.118139] env[61594]: DEBUG nova.compute.utils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 766.118139] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Build of instance 3d6ead4a-228d-48da-89b3-4fa99f6299d2 was re-scheduled: Binding failed for port dd2bb230-848c-4e9a-8343-59436d2fbda8, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 766.118554] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 766.118780] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.118953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.119138] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.119617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 54d594fe587f4526a71ddd09ab66f6ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.126983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54d594fe587f4526a71ddd09ab66f6ae [ 766.161702] env[61594]: DEBUG nova.network.neutron [None 
req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.685227] env[61594]: ERROR nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. [ 766.685227] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.685227] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.685227] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.685227] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.685227] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.685227] env[61594]: ERROR nova.compute.manager raise self.value [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.685227] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 766.685227] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.685227] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 766.685743] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.685743] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 766.685743] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. 
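Editor's note: the traceback above bottoms out in nova/network/neutron.py:294, _ensure_no_port_binding_failure, raising PortBindingFailed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af. The sketch below paraphrases what that check does as implied by the frames shown; it uses a local stand-in exception class instead of nova.exception, and the 'binding_failed' vif_type value is an assumption about how Neutron reports a failed binding on the port.

```python
# Paraphrased sketch of the check implied by the traceback frames above;
# the exception class is a local stand-in for nova.exception.PortBindingFailed.
VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed Neutron marker value


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # Neutron signals a failed binding via the port's binding:vif_type
    # attribute rather than an API error, so callers must inspect it.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed, as in the log entries above.
try:
    _ensure_no_port_binding_failure(
        {'id': '65cc7798-0ef5-4fd5-8396-eed3859fc1af',
         'binding:vif_type': VIF_TYPE_BINDING_FAILED})
except PortBindingFailed as exc:
    print(exc)
```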
[ 766.685743] env[61594]: ERROR nova.compute.manager [ 766.685743] env[61594]: Traceback (most recent call last): [ 766.685743] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 766.685743] env[61594]: listener.cb(fileno) [ 766.685743] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 766.685743] env[61594]: result = function(*args, **kwargs) [ 766.685743] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.685743] env[61594]: return func(*args, **kwargs) [ 766.685743] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 766.685743] env[61594]: raise e [ 766.685743] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.685743] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 766.685743] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.685743] env[61594]: created_port_ids = self._update_ports_for_instance( [ 766.685743] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.685743] env[61594]: with excutils.save_and_reraise_exception(): [ 766.685743] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.685743] env[61594]: self.force_reraise() [ 766.685743] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.685743] env[61594]: raise self.value [ 766.685743] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.685743] env[61594]: updated_port = self._update_port( [ 766.685743] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.685743] env[61594]: _ensure_no_port_binding_failure(port) [ 766.685743] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.685743] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 766.686542] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. [ 766.686542] env[61594]: Removing descriptor: 21 [ 766.686719] env[61594]: ERROR nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. 
[ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Traceback (most recent call last): [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] yield resources [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.driver.spawn(context, instance, image_meta, [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] vm_ref = self.build_virtual_machine(instance, [ 766.686719] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] for vif in network_info: [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self._sync_wrapper(fn, *args, **kwargs) [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.wait() [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self[:] = self._gt.wait() [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self._exit_event.wait() [ 766.686986] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 766.686986] env[61594]: ERROR 
nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] result = hub.switch() [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self.greenlet.switch() [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] result = function(*args, **kwargs) [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return func(*args, **kwargs) [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise e [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] nwinfo = self.network_api.allocate_for_instance( [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] created_port_ids = self._update_ports_for_instance( [ 766.687328] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] with excutils.save_and_reraise_exception(): [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.force_reraise() [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise self.value [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] updated_port = self._update_port( [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.687667] 
env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] _ensure_no_port_binding_failure(port) [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise exception.PortBindingFailed(port_id=port['id']) [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. [ 766.687667] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] [ 766.687987] env[61594]: INFO nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Terminating instance [ 766.692877] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquiring lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.693196] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquired lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.693503] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.694169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg a2be0db0d14e4e17af22f9b987bf1284 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.704096] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2be0db0d14e4e17af22f9b987bf1284 [ 766.767298] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.768067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg f0ff0ccb74734bbb8018714a65a7be4d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 
766.775763] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.782517] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0ff0ccb74734bbb8018714a65a7be4d [ 766.782905] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-3d6ead4a-228d-48da-89b3-4fa99f6299d2" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.783239] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 766.783518] env[61594]: DEBUG nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 766.783775] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.828462] env[61594]: ERROR nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. 
[ 766.828462] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.828462] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.828462] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.828462] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.828462] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.828462] env[61594]: ERROR nova.compute.manager raise self.value [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.828462] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 766.828462] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.828462] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 766.829441] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.829441] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 766.829441] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. 
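Editor's note: every traceback in this section passes through oslo_utils.excutils.save_and_reraise_exception (the __exit__ / force_reraise() / raise self.value frames). The snippet below is a minimal, self-contained usage sketch of that context manager; the helper function and its callbacks are hypothetical and only illustrate the cleanup-then-reraise pattern visible in the stacks above.

```python
# Minimal usage sketch of the oslo.utils context manager that produces
# the force_reraise()/raise self.value frames seen in these tracebacks.
from oslo_utils import excutils


def update_port_or_cleanup(update_port, cleanup, port_id):
    """Hypothetical helper: attempt an update, run cleanup on failure,
    then let the original exception propagate unchanged."""
    try:
        return update_port(port_id)
    except Exception:
        with excutils.save_and_reraise_exception():
            # Runs while the original exception is saved; when the block
            # exits, force_reraise() re-raises that same exception.
            cleanup(port_id)


if __name__ == "__main__":
    def failing_update(port_id):
        raise RuntimeError(f"binding failed for {port_id}")

    try:
        update_port_or_cleanup(failing_update, print, "dd2bb230")
    except RuntimeError as exc:
        print("re-raised:", exc)
```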
[ 766.829441] env[61594]: ERROR nova.compute.manager [ 766.829441] env[61594]: Traceback (most recent call last): [ 766.829441] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 766.829441] env[61594]: listener.cb(fileno) [ 766.829441] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 766.829441] env[61594]: result = function(*args, **kwargs) [ 766.829441] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.829441] env[61594]: return func(*args, **kwargs) [ 766.829441] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 766.829441] env[61594]: raise e [ 766.829441] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.829441] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 766.829441] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.829441] env[61594]: created_port_ids = self._update_ports_for_instance( [ 766.829441] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.829441] env[61594]: with excutils.save_and_reraise_exception(): [ 766.829441] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.829441] env[61594]: self.force_reraise() [ 766.829441] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.829441] env[61594]: raise self.value [ 766.829441] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.829441] env[61594]: updated_port = self._update_port( [ 766.829441] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.829441] env[61594]: _ensure_no_port_binding_failure(port) [ 766.829441] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.829441] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 766.831349] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. [ 766.831349] env[61594]: Removing descriptor: 17 [ 766.831349] env[61594]: ERROR nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. 
[ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] Traceback (most recent call last): [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] yield resources [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.driver.spawn(context, instance, image_meta, [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self._vmops.spawn(context, instance, image_meta, injected_files, [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 766.831349] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] vm_ref = self.build_virtual_machine(instance, [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] vif_infos = vmwarevif.get_vif_info(self._session, [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] for vif in network_info: [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self._sync_wrapper(fn, *args, **kwargs) [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.wait() [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self[:] = self._gt.wait() [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self._exit_event.wait() [ 766.831927] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 766.832319] env[61594]: ERROR 
nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] result = hub.switch() [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self.greenlet.switch() [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] result = function(*args, **kwargs) [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return func(*args, **kwargs) [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise e [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] nwinfo = self.network_api.allocate_for_instance( [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 766.832319] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] created_port_ids = self._update_ports_for_instance( [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] with excutils.save_and_reraise_exception(): [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.force_reraise() [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise self.value [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] updated_port = self._update_port( [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 766.832682] 
env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] _ensure_no_port_binding_failure(port) [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 766.832682] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise exception.PortBindingFailed(port_id=port['id']) [ 766.833211] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. [ 766.833211] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] [ 766.833211] env[61594]: INFO nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Terminating instance [ 766.836238] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.837056] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 23f6653ed04c4a35a63756185b4bd3e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.838342] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquiring lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.838584] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquired lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.838828] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.839375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 96202715ad2d42ec868e55e958e9e802 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.849219] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23f6653ed04c4a35a63756185b4bd3e2 [ 766.849946] env[61594]: DEBUG nova.network.neutron [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 
tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.850626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 1a8b1f514d8240fabcfc9433437b97bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.854324] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96202715ad2d42ec868e55e958e9e802 [ 766.858200] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a8b1f514d8240fabcfc9433437b97bd [ 766.858904] env[61594]: INFO nova.compute.manager [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 3d6ead4a-228d-48da-89b3-4fa99f6299d2] Took 0.08 seconds to deallocate network for instance. [ 766.865168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg f5f7239b556e4593b3e65fd523d9a8fd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.917230] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.924913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5f7239b556e4593b3e65fd523d9a8fd [ 766.933029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg c0021251f2cd43e4b72055e5e05151dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 766.980935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0021251f2cd43e4b72055e5e05151dc [ 767.012508] env[61594]: INFO nova.scheduler.client.report [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Deleted allocations for instance 3d6ead4a-228d-48da-89b3-4fa99f6299d2 [ 767.026629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 4562ee4aba624eccbb0ad5d07d899aba in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.047773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4562ee4aba624eccbb0ad5d07d899aba [ 767.048443] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d1f65603-0837-4a83-bf94-b8a91ee56633 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "3d6ead4a-228d-48da-89b3-4fa99f6299d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.148s {{(pid=61594) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.107797] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.107797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg da59b85837ff44bb93b702aa6fe4228f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.115788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da59b85837ff44bb93b702aa6fe4228f [ 767.116393] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Releasing lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.116981] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 767.116981] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 767.117325] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f37b882d-bca0-4520-9768-388fe558ccd1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.129536] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c151de-277f-4a74-ba8d-d4ef40a65232 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.157018] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a could not be found. 
[ 767.157018] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.157018] env[61594]: INFO nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 767.157018] env[61594]: DEBUG oslo.service.loopingcall [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.157262] env[61594]: DEBUG nova.compute.manager [-] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 767.157349] env[61594]: DEBUG nova.network.neutron [-] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.197533] env[61594]: DEBUG nova.network.neutron [-] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.198634] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 627a08d0a65b44d0b54b4feb05f47524 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.207799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 627a08d0a65b44d0b54b4feb05f47524 [ 767.208266] env[61594]: DEBUG nova.network.neutron [-] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.208687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 44820705e58849ba92a69789f6195660 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.211346] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.211767] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 95dd2a578d6e479884a90b90b336ade7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.218414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44820705e58849ba92a69789f6195660 [ 767.218865] env[61594]: INFO nova.compute.manager [-] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] 
Took 0.06 seconds to deallocate network for instance. [ 767.221164] env[61594]: DEBUG nova.compute.claims [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 767.221332] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.221801] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.223550] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 3d11fa2965344ba8868190e596baa4a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.225617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95dd2a578d6e479884a90b90b336ade7 [ 767.226431] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Releasing lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.226591] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 767.226782] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 767.227275] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2d2c571-2419-448d-85b2-bc9c5961facd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.236608] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c178d2-f831-43f1-9503-d002d21f2788 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.264920] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 27788608-fef5-4163-932d-be6e2f60a541 could not be found. [ 767.265183] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 767.265487] env[61594]: INFO nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Took 0.04 seconds to destroy the instance on the hypervisor. [ 767.265768] env[61594]: DEBUG oslo.service.loopingcall [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.266397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d11fa2965344ba8868190e596baa4a1 [ 767.266733] env[61594]: DEBUG nova.compute.manager [-] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 767.266831] env[61594]: DEBUG nova.network.neutron [-] [instance: 27788608-fef5-4163-932d-be6e2f60a541] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.291644] env[61594]: DEBUG nova.network.neutron [-] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.291644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 328725f9291d455a8a83ac518bd4b83c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.307458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 328725f9291d455a8a83ac518bd4b83c [ 767.308655] env[61594]: DEBUG nova.network.neutron [-] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.310144] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f856d4456a2a40e19550230b416b01a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.328964] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f856d4456a2a40e19550230b416b01a9 [ 767.328964] env[61594]: INFO nova.compute.manager [-] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Took 0.06 seconds to deallocate network for instance. [ 767.330957] env[61594]: DEBUG nova.compute.claims [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 767.331156] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.423927] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a234732c-f771-4a9f-9e2d-508f155157fe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.434337] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a6bd94-5e2b-4c22-a4b7-34736ab26914 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.473567] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3efddbb-96ec-425e-a4fa-36db760705a7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.480193] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "b6ee7d2a-dec8-4dad-b220-483e3313da31" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.480474] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "b6ee7d2a-dec8-4dad-b220-483e3313da31" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.480893] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg d419d8beeee14c9b8c204b0dfa3a3a57 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.487728] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829834b7-e375-4f80-9689-a60a2e189c7b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.511500] env[61594]: DEBUG nova.compute.provider_tree [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.511500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg c96fdfb9672a4d66a1beb31b26a12920 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.516686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d419d8beeee14c9b8c204b0dfa3a3a57 [ 767.517186] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 767.518903] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 0c67ecc51e58458b9c58b4ca79694583 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.521937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c96fdfb9672a4d66a1beb31b26a12920 [ 767.523330] env[61594]: DEBUG nova.scheduler.client.report [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 767.525328] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg f9410ad8da994af1a2bdbd0b42937506 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.546841] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9410ad8da994af1a2bdbd0b42937506 [ 767.549179] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.326s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.549179] env[61594]: ERROR nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. 
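[Editor's note] The inventory dictionary logged just above reports, per resource class, a total, a reserved amount and an allocation_ratio for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be. The worked example below shows the schedulable capacity those numbers imply, assuming placement's usual formula capacity = (total - reserved) * allocation_ratio; the values are copied from the log entry, the script itself is illustrative only.

    # Worked example: effective capacity implied by the logged inventory.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g}")
    # Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400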
[ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Traceback (most recent call last): [ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.driver.spawn(context, instance, image_meta, [ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 767.549179] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] vm_ref = self.build_virtual_machine(instance, [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] vif_infos = vmwarevif.get_vif_info(self._session, [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] for vif in network_info: [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self._sync_wrapper(fn, *args, **kwargs) [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.wait() [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self[:] = self._gt.wait() [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 767.549696] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self._exit_event.wait() [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] result = hub.switch() [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return self.greenlet.switch() [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] result = function(*args, **kwargs) [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] return func(*args, **kwargs) [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise e [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] nwinfo = self.network_api.allocate_for_instance( [ 767.550094] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] created_port_ids = self._update_ports_for_instance( [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] with excutils.save_and_reraise_exception(): [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] self.force_reraise() [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise self.value [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] updated_port = self._update_port( [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] _ensure_no_port_binding_failure(port) [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 767.550501] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] raise exception.PortBindingFailed(port_id=port['id']) [ 767.550824] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] nova.exception.PortBindingFailed: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. [ 767.550824] env[61594]: ERROR nova.compute.manager [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] [ 767.550824] env[61594]: DEBUG nova.compute.utils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 767.553298] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Build of instance f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a was re-scheduled: Binding failed for port 65cc7798-0ef5-4fd5-8396-eed3859fc1af, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 767.553298] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 767.553298] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquiring lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.553298] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Acquired lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.553596] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 767.553596] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 337dec904147498a9afe82efa89e576d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.556078] 
env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.223s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.556371] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 1c120e4f4d864c02891360d0109d2e1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.564661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 337dec904147498a9afe82efa89e576d [ 767.578304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c67ecc51e58458b9c58b4ca79694583 [ 767.601470] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.610048] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.613968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c120e4f4d864c02891360d0109d2e1a [ 767.670846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg e0d4d6ae5127481e9ad4d5e8cc16e7f0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.686781] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0d4d6ae5127481e9ad4d5e8cc16e7f0 [ 767.687483] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.701623] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.702688] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock 
"9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.703097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 916ff0a8648a4ef18937317b57deb637 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.715032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 916ff0a8648a4ef18937317b57deb637 [ 767.715032] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 767.716468] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 846f436cbdaa46a9af0aad94357eb843 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.752230] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8d96570-25a0-4c31-8c55-5336437a0e9f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.761671] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a087ae4-1893-4be8-95be-16170d1f937f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.766802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 846f436cbdaa46a9af0aad94357eb843 [ 767.800938] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971ceb30-ec9f-4109-9787-5041c031839b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.815592] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.816689] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6a6af2-45f3-4ace-af01-2f3a64ca8490 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.833168] env[61594]: DEBUG nova.compute.provider_tree [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 767.833928] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg f125aaa907854e6ba618b90ff02dd59f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.841778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f125aaa907854e6ba618b90ff02dd59f [ 767.842802] env[61594]: DEBUG nova.scheduler.client.report [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 767.845222] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 5e81ff6c4f4842929a746e1561ac56eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.861329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e81ff6c4f4842929a746e1561ac56eb [ 767.862186] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.862874] env[61594]: ERROR nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. 
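[Editor's note] The tracebacks in this section all terminate in _ensure_no_port_binding_failure raising PortBindingFailed(port_id=port['id']) after Neutron reports a failed binding. The sketch below shows that check in isolation, under the assumption that a failed binding is signalled by the port's 'binding:vif_type' being set to 'binding_failed'; Nova's real helper may differ in detail, and the exception class here is a stand-in rather than nova.exception.PortBindingFailed itself.

    # Minimal sketch of the check that produces the PortBindingFailed errors above.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron marks a failed binding via 'binding:vif_type'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # When this raises inside allocate_for_instance(), the compute manager aborts
    # the resource claim and re-schedules the build, as the following log entries
    # ("Aborting claim", "was re-scheduled") show.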
[ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] Traceback (most recent call last): [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.driver.spawn(context, instance, image_meta, [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self._vmops.spawn(context, instance, image_meta, injected_files, [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] vm_ref = self.build_virtual_machine(instance, [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] vif_infos = vmwarevif.get_vif_info(self._session, [ 767.862874] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] for vif in network_info: [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self._sync_wrapper(fn, *args, **kwargs) [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.wait() [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self[:] = self._gt.wait() [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self._exit_event.wait() [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] result = hub.switch() [ 767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
767.863256] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return self.greenlet.switch() [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] result = function(*args, **kwargs) [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] return func(*args, **kwargs) [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise e [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] nwinfo = self.network_api.allocate_for_instance( [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] created_port_ids = self._update_ports_for_instance( [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] with excutils.save_and_reraise_exception(): [ 767.863586] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] self.force_reraise() [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise self.value [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] updated_port = self._update_port( [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] _ensure_no_port_binding_failure(port) [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] raise exception.PortBindingFailed(port_id=port['id']) [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] nova.exception.PortBindingFailed: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. [ 767.863915] env[61594]: ERROR nova.compute.manager [instance: 27788608-fef5-4163-932d-be6e2f60a541] [ 767.864208] env[61594]: DEBUG nova.compute.utils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 767.864717] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.263s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.866236] env[61594]: INFO nova.compute.claims [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.867771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg f52ed1b385e44f62b6b359121d22b522 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.871739] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Build of instance 27788608-fef5-4163-932d-be6e2f60a541 was re-scheduled: Binding failed for port 68b29d7a-88f7-4130-a084-890a355e3dbb, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 767.871739] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 767.871739] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquiring lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 767.871739] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Acquired lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.872056] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 767.872056] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 8cb3e663b790426b8f982de93335bf84 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.881412] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cb3e663b790426b8f982de93335bf84 [ 767.911822] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f52ed1b385e44f62b6b359121d22b522 [ 767.913440] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg c6b65e6cb7394b5cb00a8863d6955155 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 767.928233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6b65e6cb7394b5cb00a8863d6955155 [ 767.975654] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.039016] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.039982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 6f12f3b8127244f8a6e26d30f9ebcbdd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.050263] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba751e89-fb5e-4048-8e4e-f9c36cce5b70 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.056195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f12f3b8127244f8a6e26d30f9ebcbdd [ 768.056497] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Releasing lock "refresh_cache-f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.056722] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 768.056939] env[61594]: DEBUG nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 768.057081] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 768.062314] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5618b81a-b464-4596-82e4-ae73e2f8593c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.095770] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8a8331-b36d-4d0e-8b47-35c6cc1dd13b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.104869] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b545e4b-1843-4565-9cb1-de70d4b25262 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.110044] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.110697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg b30bc92ec50d4eb9b70b9961ac6a9704 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.122499] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b30bc92ec50d4eb9b70b9961ac6a9704 [ 768.123331] env[61594]: DEBUG nova.compute.provider_tree [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.123631] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 4f175a4bb1da47c39985fc0a56fb46e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.126853] env[61594]: DEBUG nova.network.neutron [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.127452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 73ed76b295ac4578bbab96eeee180bfa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.133152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f175a4bb1da47c39985fc0a56fb46e0 [ 768.134101] env[61594]: DEBUG nova.scheduler.client.report [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 768.138018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 4c194a75d28047cb84e7a591459ae85b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.138018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73ed76b295ac4578bbab96eeee180bfa [ 768.138018] env[61594]: INFO nova.compute.manager [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] [instance: 
f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a] Took 0.08 seconds to deallocate network for instance. [ 768.139521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 121bf0f039e048af866dcb1c1894ba5c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.155798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c194a75d28047cb84e7a591459ae85b [ 768.155798] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.289s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.155798] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 768.156246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 8183de9d8ef349e994c8fb63297c7e9b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.157176] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.342s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.158992] env[61594]: INFO nova.compute.claims [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 768.161345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg f5e5b9bb92fe49f2b48b2c8342f47702 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.192944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8183de9d8ef349e994c8fb63297c7e9b [ 768.194522] env[61594]: DEBUG nova.compute.utils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.195464] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 430f2cede5184e40b7bb4724097cfe16 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.196482] env[61594]: DEBUG nova.compute.manager [None 
req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 768.196702] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 768.206712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 121bf0f039e048af866dcb1c1894ba5c [ 768.207199] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 430f2cede5184e40b7bb4724097cfe16 [ 768.209636] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 08414916fb844f119259263b73f77ecb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.210735] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 768.212326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg d5e43b90b92b41839c68553c6486ae1e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.213597] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5e5b9bb92fe49f2b48b2c8342f47702 [ 768.215199] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 5991d2aa21a84be5b79ebfed88bd9ea4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.231296] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5991d2aa21a84be5b79ebfed88bd9ea4 [ 768.243060] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5e43b90b92b41839c68553c6486ae1e [ 768.245784] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg dccbd7570bfe431da6554abf079b4feb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.249939] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08414916fb844f119259263b73f77ecb [ 768.279752] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dccbd7570bfe431da6554abf079b4feb [ 768.280545] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 768.285865] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.287015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 13f209134f42412fbd3a2379f503645e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.290185] env[61594]: INFO nova.scheduler.client.report [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Deleted allocations for instance f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a [ 768.298096] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13f209134f42412fbd3a2379f503645e [ 768.298837] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Expecting reply to msg 2ee25e2a967c45b18579a3ced9aa4f98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.299697] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Releasing lock "refresh_cache-27788608-fef5-4163-932d-be6e2f60a541" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.301774] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 768.301774] env[61594]: DEBUG nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 768.301774] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 768.312550] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.312550] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.312753] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.312811] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.312955] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.313123] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.313335] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 
tempest-ImagesTestJSON-873357144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.313490] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 768.313655] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.313817] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.313989] env[61594]: DEBUG nova.virt.hardware [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.315476] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74a0335-1806-44d8-a934-41df38d5c98e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.321851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ee25e2a967c45b18579a3ced9aa4f98 [ 768.323095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c70b2b64-1ac4-43d4-8b2d-ab4e1e814afe tempest-FloatingIPsAssociationNegativeTestJSON-1743565394 tempest-FloatingIPsAssociationNegativeTestJSON-1743565394-project-member] Lock "f5f4088e-0d79-4d6f-ae45-6ab2e5122e0a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.770s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.328094] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71cf0a0-3d52-42d1-896c-d9aa54f71eb1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.336805] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.337423] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 13bc592735b04d45b8b31eaf2408aafb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.352163] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13bc592735b04d45b8b31eaf2408aafb [ 768.353064] env[61594]: DEBUG nova.network.neutron [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.354428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg d25c006a0a3b47e5a5fca7031f982f0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.365999] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d25c006a0a3b47e5a5fca7031f982f0b [ 768.367165] env[61594]: ERROR nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. 
[ 768.367165] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 768.367165] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 768.367165] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 768.367165] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.367165] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.367165] env[61594]: ERROR nova.compute.manager raise self.value [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 768.367165] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 768.367165] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.367165] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 768.367680] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.367680] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 768.367680] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. 
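[editor's note] The traceback above bottoms out in _ensure_no_port_binding_failure raising PortBindingFailed after Neutron reports the port binding as failed. As a rough, self-contained sketch of that check (not Nova's actual module: the PortBindingFailed class, helper name, and sample port dict below are local stand-ins; only the binding:vif_type convention and the port ID are taken from the log), the logic amounts to inspecting the updated port's binding:vif_type and re-raising a typed error:

    # Minimal sketch of the port-binding check implied by the traceback above.
    # Everything here is a local stand-in; nothing is imported from Nova.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Neutron marks a port that no mechanism driver could bind with
        # binding:vif_type == 'binding_failed'; treat that as a hard error.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        failed_port = {'id': 'a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3',
                       'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(failed_port)
        except PortBindingFailed as exc:
            print(exc)  # mirrors the message recorded in the log

In the records that follow, the same exception surfaces again out of _allocate_network_async during spawn, which is why instance 791543a0-ad32-4b51-9d21-b5cc72e480ff is then terminated, its network deallocated, and its resource claim aborted.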
[ 768.367680] env[61594]: ERROR nova.compute.manager [ 768.367680] env[61594]: Traceback (most recent call last): [ 768.367680] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 768.367680] env[61594]: listener.cb(fileno) [ 768.367680] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 768.367680] env[61594]: result = function(*args, **kwargs) [ 768.367680] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 768.367680] env[61594]: return func(*args, **kwargs) [ 768.367680] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 768.367680] env[61594]: raise e [ 768.367680] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 768.367680] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 768.367680] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 768.367680] env[61594]: created_port_ids = self._update_ports_for_instance( [ 768.367680] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 768.367680] env[61594]: with excutils.save_and_reraise_exception(): [ 768.367680] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.367680] env[61594]: self.force_reraise() [ 768.367680] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.367680] env[61594]: raise self.value [ 768.367680] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 768.367680] env[61594]: updated_port = self._update_port( [ 768.367680] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.367680] env[61594]: _ensure_no_port_binding_failure(port) [ 768.367680] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.367680] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 768.368516] env[61594]: nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. [ 768.368516] env[61594]: Removing descriptor: 20 [ 768.368516] env[61594]: INFO nova.compute.manager [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] [instance: 27788608-fef5-4163-932d-be6e2f60a541] Took 0.07 seconds to deallocate network for instance. [ 768.371764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg e19548398a51470db034dfc1fe704eb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.373147] env[61594]: ERROR nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. 
[ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Traceback (most recent call last): [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] yield resources [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.driver.spawn(context, instance, image_meta, [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] vm_ref = self.build_virtual_machine(instance, [ 768.373147] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] vif_infos = vmwarevif.get_vif_info(self._session, [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] for vif in network_info: [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self._sync_wrapper(fn, *args, **kwargs) [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.wait() [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self[:] = self._gt.wait() [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self._exit_event.wait() [ 768.373531] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 768.373531] env[61594]: ERROR 
nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] result = hub.switch() [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self.greenlet.switch() [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] result = function(*args, **kwargs) [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return func(*args, **kwargs) [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise e [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] nwinfo = self.network_api.allocate_for_instance( [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] created_port_ids = self._update_ports_for_instance( [ 768.373858] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] with excutils.save_and_reraise_exception(): [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.force_reraise() [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise self.value [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] updated_port = self._update_port( [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 768.374219] 
env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] _ensure_no_port_binding_failure(port) [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise exception.PortBindingFailed(port_id=port['id']) [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. [ 768.374219] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] [ 768.374590] env[61594]: INFO nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Terminating instance [ 768.378678] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 768.378896] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquired lock "refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.379101] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 768.379592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 50ea09811d5e4a9ca8304d6a105ed48f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.394023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50ea09811d5e4a9ca8304d6a105ed48f [ 768.400851] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7bd29fa-4133-478c-beea-69335cfaab8c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.409289] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38923e4-65dd-45bd-956c-28c0caaef2bc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.439465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e19548398a51470db034dfc1fe704eb4 [ 768.440763] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 
tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.444673] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg ed9117ec35664637ab36efbc470f484d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.446148] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e7d7d6-3c32-4aaf-9e79-40e7578eee46 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.455818] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4783523c-2019-460f-a5d6-06429063da7e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.475894] env[61594]: DEBUG nova.compute.provider_tree [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.476282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 5f91bf1751e446c4a67c937d9a161125 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.492950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f91bf1751e446c4a67c937d9a161125 [ 768.493525] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed9117ec35664637ab36efbc470f484d [ 768.494562] env[61594]: DEBUG nova.scheduler.client.report [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 768.497174] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 2c469fcbbe3e450696ad86ba8d943640 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.511877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c469fcbbe3e450696ad86ba8d943640 [ 768.512496] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 
tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.513313] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 768.514955] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 46f9429ea5104befb8b106812aa5bc18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.535188] env[61594]: INFO nova.scheduler.client.report [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Deleted allocations for instance 27788608-fef5-4163-932d-be6e2f60a541 [ 768.540782] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Expecting reply to msg 410db7a200644d558c2c4a85b5354d28 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.568317] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 46f9429ea5104befb8b106812aa5bc18 [ 768.569864] env[61594]: DEBUG nova.compute.utils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 768.570725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg dcde6afa78ec486b8ce29891f6a623ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.571906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 410db7a200644d558c2c4a85b5354d28 [ 768.572457] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 768.572622] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 768.574906] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3c4169f2-d315-4bc5-868d-348478a0e99b tempest-InstanceActionsV221TestJSON-559212044 tempest-InstanceActionsV221TestJSON-559212044-project-member] Lock "27788608-fef5-4163-932d-be6e2f60a541" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.020s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.581402] env[61594]: DEBUG nova.policy [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ce2159a0ccb46c89a7574d04142e926', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbc2efe50ed4ae5a5f0cf6f492a20bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 768.583167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcde6afa78ec486b8ce29891f6a623ac [ 768.583672] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 768.585441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 38ca187ebb37490fb3d0477b44d7ad05 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.635097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38ca187ebb37490fb3d0477b44d7ad05 [ 768.637817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 65e3788b0482410bb8e66fca4cb5e699 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.674438] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65e3788b0482410bb8e66fca4cb5e699 [ 768.675981] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 768.710131] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 768.710131] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 768.710131] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 768.710447] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 768.710447] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 768.710447] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 768.710447] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 768.710846] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 768.710846] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 768.710846] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 768.711014] env[61594]: DEBUG nova.virt.hardware [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 768.712305] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f213b45-dc69-4c7d-aed0-48b34eb31ff4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.721585] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8f6e19c-5d08-404f-938d-86f946e6887b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.801471] env[61594]: DEBUG nova.policy [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f77e9c81e9eb4097bd16d9e763423d3d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f0bb8755136749c6a5166a19e20059df', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 768.812233] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.812233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 74bc0eaf228845f3a087adfa06fae74b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.830488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74bc0eaf228845f3a087adfa06fae74b [ 768.831253] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Releasing lock 
"refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 768.831677] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 768.831850] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 768.832409] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-493175a8-fb7a-4f4c-b013-690b2076cade {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.841593] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba399e73-9b2f-4ed1-8b3f-eccbd173661b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.865231] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 791543a0-ad32-4b51-9d21-b5cc72e480ff could not be found. [ 768.865491] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 768.865653] env[61594]: INFO nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Took 0.03 seconds to destroy the instance on the hypervisor. [ 768.865895] env[61594]: DEBUG oslo.service.loopingcall [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 768.866142] env[61594]: DEBUG nova.compute.manager [-] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 768.866241] env[61594]: DEBUG nova.network.neutron [-] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 768.927402] env[61594]: DEBUG nova.network.neutron [-] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 768.927402] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 133b310aae6d40ba97a24414378ca877 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.939657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 133b310aae6d40ba97a24414378ca877 [ 768.940163] env[61594]: DEBUG nova.network.neutron [-] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.940600] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1e2be302d7354fefa4288752b8555015 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 768.955283] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e2be302d7354fefa4288752b8555015 [ 768.955283] env[61594]: INFO nova.compute.manager [-] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Took 0.09 seconds to deallocate network for instance. [ 768.956679] env[61594]: DEBUG nova.compute.claims [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 768.963322] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.963322] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.004s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 768.963322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 21550afa9e2f413faf40a2fc46222323 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.035592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21550afa9e2f413faf40a2fc46222323 [ 769.098409] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquiring lock "017f274d-c305-4aff-977a-c8bb2827880f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.098409] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "017f274d-c305-4aff-977a-c8bb2827880f" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.098409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg a548210eab7948eba56eece6af7ce600 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.113699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a548210eab7948eba56eece6af7ce600 [ 769.114572] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 769.116520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 53bfcfaacf33443db602d11a9fdb34bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.170930] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0523ef77-de1f-405e-8f70-ecd711600042 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.180180] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4b6569-7802-4e88-afaa-c2bc4456f1f2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.186791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53bfcfaacf33443db602d11a9fdb34bf [ 769.221148] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cffaf13-1438-499e-a970-bbed53ec701d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.232539] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390fdc62-adb0-4888-a49c-13dd2cd37c6a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.237915] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.248649] env[61594]: DEBUG nova.compute.provider_tree [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.249307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 
tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 72a5c0f8e3dd4770aae308baa24b4134 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.256512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72a5c0f8e3dd4770aae308baa24b4134 [ 769.257930] env[61594]: DEBUG nova.scheduler.client.report [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 769.259603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg fe9d2ae076a147e3aab8856432be89c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.276267] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe9d2ae076a147e3aab8856432be89c6 [ 769.277151] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.317s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.277735] env[61594]: ERROR nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. 
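Editor's note: the build of instance 791543a0-ad32-4b51-9d21-b5cc72e480ff fails here because Neutron reported the binding of port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3 as failed. The check named at nova/network/neutron.py:294 in the traceback below behaves roughly as in the following sketch; this is an illustration rather than the verbatim Nova source, and the constant value and exception body are assumptions inferred from the log message.

    # Hedged sketch of _ensure_no_port_binding_failure (nova/network/neutron.py),
    # the function that raises the error logged above. Not Nova's actual code.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    # Assumed value of the vif_type Neutron uses to signal a failed binding.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding on the port's binding:vif_type
        # attribute; Nova converts that into PortBindingFailed so the build
        # is aborted and the instance re-scheduled, as seen in this log.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    if __name__ == '__main__':
        try:
            _ensure_no_port_binding_failure(
                {'id': 'a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3',
                 'binding:vif_type': 'binding_failed'})
        except PortBindingFailed as exc:
            print(exc)  # same wording as the ERROR line above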
[ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Traceback (most recent call last): [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.driver.spawn(context, instance, image_meta, [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] vm_ref = self.build_virtual_machine(instance, [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.277735] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] for vif in network_info: [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self._sync_wrapper(fn, *args, **kwargs) [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.wait() [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self[:] = self._gt.wait() [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self._exit_event.wait() [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] result = hub.switch() [ 769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
769.278176] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return self.greenlet.switch() [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] result = function(*args, **kwargs) [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] return func(*args, **kwargs) [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise e [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] nwinfo = self.network_api.allocate_for_instance( [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] created_port_ids = self._update_ports_for_instance( [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] with excutils.save_and_reraise_exception(): [ 769.279635] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] self.force_reraise() [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise self.value [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] updated_port = self._update_port( [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] _ensure_no_port_binding_failure(port) [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] raise exception.PortBindingFailed(port_id=port['id']) [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] nova.exception.PortBindingFailed: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. [ 769.279984] env[61594]: ERROR nova.compute.manager [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] [ 769.281449] env[61594]: DEBUG nova.compute.utils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 769.281449] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.042s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.281541] env[61594]: INFO nova.compute.claims [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.283540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 846e97c17c324ceda25493acc307de0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.285772] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Build of instance 791543a0-ad32-4b51-9d21-b5cc72e480ff was re-scheduled: Binding failed for port a5a3c5a8-89c7-42ba-9904-f5dd12e4c9f3, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 769.285772] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 769.285968] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.286048] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquired lock "refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.286363] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 769.286632] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg a2e24f81f0af4db1a1dfefc70b7ed97a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.295923] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2e24f81f0af4db1a1dfefc70b7ed97a [ 769.321040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 846e97c17c324ceda25493acc307de0c [ 769.322931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg f9b27a2c410b4c7fb6b760d4bd40770f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.332524] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9b27a2c410b4c7fb6b760d4bd40770f [ 769.351018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "6905ca7f-445e-45f8-8558-b119560a4216" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.351448] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "6905ca7f-445e-45f8-8558-b119560a4216" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.353349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 41e99eab0aa246628ecab2f748a27409 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.384615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41e99eab0aa246628ecab2f748a27409 [ 769.385219] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 769.386970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg c007483f2b5b484dacf79830e9c468a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.393893] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 769.429190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c007483f2b5b484dacf79830e9c468a9 [ 769.451236] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 769.485925] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-138ff0a3-c74c-4787-aceb-558746b5fd91 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.496278] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d06b4e9-5438-483c-8253-b753a51424f1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.530795] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63e898e-4592-49c7-ad3b-d52488aa177f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.540016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50513d8-be36-4068-814a-9dd932fef521 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.557905] env[61594]: DEBUG nova.compute.provider_tree [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.558548] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 4b76a8e54d1448f3b10042e7c273b72e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.567614] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b76a8e54d1448f3b10042e7c273b72e [ 769.568738] env[61594]: DEBUG nova.scheduler.client.report [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 769.571206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg a4664bf1ac3e43deb922262ea001d74b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.584193] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4664bf1ac3e43deb922262ea001d74b [ 769.585079] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.305s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.585519] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 769.587238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg cc1918ba370a44718a99aae8e27b01fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.592672] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Successfully created port: 35fb7071-e5a9-4272-ae77-a82b6638f4d7 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 769.594339] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.143s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.598024] env[61594]: INFO nova.compute.claims [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.598024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6b0ec1e9f6a44dfb8957b52170cb26e8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.656508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b0ec1e9f6a44dfb8957b52170cb26e8 [ 769.657221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc1918ba370a44718a99aae8e27b01fc [ 769.658874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg edc6fd5479854d3b9c20fb7c7ebfd4bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.660407] env[61594]: DEBUG nova.compute.utils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.660968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 588aaed5b3d4490cb00bd78e0b1eb898 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.662596] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 769.662596] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 769.672463] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edc6fd5479854d3b9c20fb7c7ebfd4bd [ 769.678725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 588aaed5b3d4490cb00bd78e0b1eb898 [ 769.678725] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 769.678860] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 5fc3d85457d34240b3bb02951d48617e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.722019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5fc3d85457d34240b3bb02951d48617e [ 769.722463] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg b5004406ba524d3ea05a2e970b6b7fa2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.762544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5004406ba524d3ea05a2e970b6b7fa2 [ 769.765333] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 769.795203] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 769.795465] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 769.795626] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 769.795807] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 769.795954] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 769.796118] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 769.796332] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 769.796490] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 769.796657] env[61594]: DEBUG 
nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 769.796818] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 769.796988] env[61594]: DEBUG nova.virt.hardware [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 769.797863] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5d22ad-9255-4588-a013-ac2f56f3ae92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.808627] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f388ca9-8bd4-4ec6-bc49-cdfe1441505a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.813639] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6729f74-2d62-483a-9c40-c9530370f42f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.826924] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.827449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 424a5e1d1f7742a08c8bb411add52305 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.830830] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c37bb17-e70f-422f-9dab-7213bf37017c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.836427] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 424a5e1d1f7742a08c8bb411add52305 [ 769.836965] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Releasing lock "refresh_cache-791543a0-ad32-4b51-9d21-b5cc72e480ff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 769.837201] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 
tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 769.837375] env[61594]: DEBUG nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 769.837541] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 769.863868] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fbf5fe-38c8-432d-b019-929201e21323 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.871260] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f57b40a5-4d1f-427b-8854-15f3f26da81e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.888471] env[61594]: DEBUG nova.compute.provider_tree [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.888956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 87fad350fc1f41e9b56218074e5956da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.900932] env[61594]: DEBUG nova.policy [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c80a54c84c9445e5b0abb5ca5eedc414', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0543ecd93ed4491286c3c99f7d567ed4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 769.901699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87fad350fc1f41e9b56218074e5956da [ 769.903256] env[61594]: DEBUG nova.scheduler.client.report [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 769.907992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 8cf4674ad16e49dda90f1432b90363dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.928594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cf4674ad16e49dda90f1432b90363dc [ 769.928667] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.929194] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 769.932021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 8112480ab92f47e18273be136c898962 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.966796] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 769.967418] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg a52e7b13a7a549e881f251920605c5a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.979247] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a52e7b13a7a549e881f251920605c5a1 [ 769.980866] env[61594]: DEBUG nova.network.neutron [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.982595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 79b71508098843509dfa291bd8cdef69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.983896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8112480ab92f47e18273be136c898962 [ 769.984992] env[61594]: DEBUG nova.compute.utils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 769.985584] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 1eb7cf1f30834ad5a07ba25c663ce6bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 769.989092] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 769.989287] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 769.992156] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79b71508098843509dfa291bd8cdef69 [ 769.992850] env[61594]: INFO nova.compute.manager [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 791543a0-ad32-4b51-9d21-b5cc72e480ff] Took 0.16 seconds to deallocate network for instance. 
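Editor's note: at this point the failed build of 791543a0-ad32-4b51-9d21-b5cc72e480ff has been cleaned up (network deallocated, claim aborted) while two new builds (017f274d-c305-4aff-977a-c8bb2827880f and 6905ca7f-445e-45f8-8558-b119560a4216) claim resources on the same node. The repeated Acquiring lock "compute_resources" / "acquired ... waited" / "released ... held" DEBUG lines around those claims come from oslo.concurrency's synchronized decorator, which serializes per-node resource accounting. A minimal sketch of that pattern follows; the tracker and claim shown are simplified stand-ins, not Nova's real ResourceTracker.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

    class ResourceTracker:
        """Simplified stand-in for Nova's resource-tracker claim locking."""

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, instance_uuid, vcpus, memory_mb):
            # While the lock is held no other build can mutate this node's
            # usage; lockutils emits the "acquired by ... waited" and
            # "released ... held" DEBUG lines seen throughout this log.
            return {'instance_uuid': instance_uuid,
                    'vcpus': vcpus,
                    'memory_mb': memory_mb}

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, claim):
            # Invoked when the build fails (PortBindingFailed above) so the
            # claimed resources are released before the re-schedule.
            pass

Holding the same lock for claim and abort is what keeps the inventory reported to placement above (48 VCPU at a 4.0 allocation ratio, 196590 MB RAM, 400 GB disk) consistent while builds are claimed, failed, and re-scheduled concurrently.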
[ 769.998196] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg bcf3b0397fc2423fb7dcc173d3a31c1c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.002052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eb7cf1f30834ad5a07ba25c663ce6bc [ 770.002607] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 770.004695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg ef9125dad1644c16914b649c9fcf6074 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.012715] env[61594]: ERROR nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. [ 770.012715] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 770.012715] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 770.012715] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 770.012715] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.012715] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.012715] env[61594]: ERROR nova.compute.manager raise self.value [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 770.012715] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 770.012715] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.012715] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 770.013168] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.013168] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 770.013168] env[61594]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. [ 770.013168] env[61594]: ERROR nova.compute.manager [ 770.013168] env[61594]: Traceback (most recent call last): [ 770.013168] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 770.013168] env[61594]: listener.cb(fileno) [ 770.013168] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 770.013168] env[61594]: result = function(*args, **kwargs) [ 770.013168] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 770.013168] env[61594]: return func(*args, **kwargs) [ 770.013168] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 770.013168] env[61594]: raise e [ 770.013168] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 770.013168] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 770.013168] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 770.013168] env[61594]: created_port_ids = self._update_ports_for_instance( [ 770.013168] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 770.013168] env[61594]: with excutils.save_and_reraise_exception(): [ 770.013168] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.013168] env[61594]: self.force_reraise() [ 770.013168] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.013168] env[61594]: raise self.value [ 770.013168] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 770.013168] env[61594]: updated_port = self._update_port( [ 770.013168] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.013168] env[61594]: _ensure_no_port_binding_failure(port) [ 770.013168] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.013168] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 770.013926] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. [ 770.013926] env[61594]: Removing descriptor: 19 [ 770.013926] env[61594]: ERROR nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. 
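Editor's note: this second PortBindingFailed (port 7e591090-397c-446d-bc6c-60ba035cbddd, instance 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a) appears twice: once from the greenthread doing the allocation ("Instance failed network setup after 1 attempt(s)") and again, in the traceback that follows, from the VMware spawn path when get_vif_info() iterates network_info. That is because network_info is an asynchronous wrapper which only blocks on, and re-raises from, the allocation greenthread when it is first consumed. The sketch below illustrates that wrapper pattern under stated assumptions; the class and method names mirror the traceback, but the body is illustrative rather than Nova's code.

    import eventlet

    class NetworkInfoAsyncWrapper:
        """Hedged sketch of the async network_info pattern from
        nova/network/model.py: allocation runs in a greenthread and any
        error only surfaces when the result is consumed."""

        def __init__(self, allocate_fn, *args, **kwargs):
            # Start allocation without blocking the rest of the build
            # (the _allocate_network_async step in the traceback).
            self._gt = eventlet.spawn(allocate_fn, *args, **kwargs)
            self._vifs = None

        def wait(self):
            if self._vifs is None:
                # GreenThread.wait() returns the allocation result or
                # re-raises the exception raised inside the greenthread --
                # here, PortBindingFailed.
                self._vifs = self._gt.wait()
            return self._vifs

        def __iter__(self):
            # get_vif_info() iterates network_info, which forces the wait
            # and makes the spawn fail inside build_virtual_machine().
            return iter(self.wait())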
[ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Traceback (most recent call last): [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] yield resources [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.driver.spawn(context, instance, image_meta, [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.013926] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] vm_ref = self.build_virtual_machine(instance, [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] for vif in network_info: [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self._sync_wrapper(fn, *args, **kwargs) [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.wait() [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self[:] = self._gt.wait() [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self._exit_event.wait() [ 770.014271] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 770.014622] env[61594]: ERROR 
nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] result = hub.switch() [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self.greenlet.switch() [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] result = function(*args, **kwargs) [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return func(*args, **kwargs) [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise e [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] nwinfo = self.network_api.allocate_for_instance( [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 770.014622] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] created_port_ids = self._update_ports_for_instance( [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] with excutils.save_and_reraise_exception(): [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.force_reraise() [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise self.value [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] updated_port = self._update_port( [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.014957] 
env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] _ensure_no_port_binding_failure(port) [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 770.014957] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise exception.PortBindingFailed(port_id=port['id']) [ 770.015254] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. [ 770.015254] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] [ 770.015254] env[61594]: INFO nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Terminating instance [ 770.020272] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.020272] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquired lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.020272] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 770.020272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg ce1ae027f30f4913b39fa424104fd6ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.032945] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce1ae027f30f4913b39fa424104fd6ea [ 770.042873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcf3b0397fc2423fb7dcc173d3a31c1c [ 770.044166] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef9125dad1644c16914b649c9fcf6074 [ 770.047142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg a7a51b8b2f164f759603b1fcc2c6d395 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.050257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg b9b5f2466a3242dbb9e1c2664b99389c in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.078111] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7a51b8b2f164f759603b1fcc2c6d395 [ 770.084885] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9b5f2466a3242dbb9e1c2664b99389c [ 770.086016] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 770.109132] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 770.111821] env[61594]: INFO nova.scheduler.client.report [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Deleted allocations for instance 791543a0-ad32-4b51-9d21-b5cc72e480ff [ 770.119763] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 770.120038] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 770.120210] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 770.120392] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 770.120535] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Image 
pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 770.120678] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 770.120880] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 770.121043] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 770.121430] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 770.121621] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 770.122775] env[61594]: DEBUG nova.virt.hardware [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 770.123014] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de67d300-baee-42da-8c38-f725e22d795e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.127043] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 234937725bdb42289062604b06f21591 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.135722] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1707b57-8196-46f9-a900-0732e8c2936a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.152887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 234937725bdb42289062604b06f21591 [ 770.153691] env[61594]: DEBUG oslo_concurrency.lockutils [None req-626ffeab-4788-4a12-a502-6377da7152c2 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "791543a0-ad32-4b51-9d21-b5cc72e480ff" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.760s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.257454] env[61594]: DEBUG nova.policy [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee5a21ff43314c1a857f6958056173f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afc5e909ec5c4dd983ece5aa3236910f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 770.441620] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.441620] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 281480db71af40c3bade20cc4b6c2522 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.452025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 281480db71af40c3bade20cc4b6c2522 [ 770.452025] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Releasing lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.452025] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 770.452227] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 770.453536] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-213bc04e-15d0-4406-8f90-f27e3cff3414 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.464021] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c0995f-7a2b-4c60-84ed-dce5ac0d4c0e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.482726] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Successfully created port: d5333c50-e5a1-43e0-9f2b-1e477c0113f7 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 770.490954] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a could not be found. [ 770.491288] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 770.491560] env[61594]: INFO nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 770.491871] env[61594]: DEBUG oslo.service.loopingcall [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 770.492174] env[61594]: DEBUG nova.compute.manager [-] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 770.492328] env[61594]: DEBUG nova.network.neutron [-] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.550760] env[61594]: DEBUG nova.network.neutron [-] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 770.551327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 90d3b5df660b461cb5b79ee12ce2bcb7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.560934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90d3b5df660b461cb5b79ee12ce2bcb7 [ 770.561439] env[61594]: DEBUG nova.network.neutron [-] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.561915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b76c9db773c24e7baf6d194dd1ead1ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.571964] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b76c9db773c24e7baf6d194dd1ead1ae [ 770.572505] env[61594]: INFO nova.compute.manager [-] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Took 0.08 seconds to deallocate network for instance. [ 770.580973] env[61594]: DEBUG nova.compute.claims [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 770.581255] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.581817] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 770.586575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 63c2b0e7cacc4f8790f5a04ba7322c1a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.636816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63c2b0e7cacc4f8790f5a04ba7322c1a [ 770.757179] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e69a15-da92-48d8-9ce2-b63003e00458 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.765328] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12845dbd-fd19-410c-8129-55944f80b9d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.801425] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c5dae7-8341-445c-9b41-c2de867ab970 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.810778] 
env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c66cfcf-64c4-485c-aab8-106530c7260e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.826802] env[61594]: DEBUG nova.compute.provider_tree [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 770.827480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 26d64c67cd1d4329966cfaf77b287e44 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.838055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26d64c67cd1d4329966cfaf77b287e44 [ 770.839470] env[61594]: DEBUG nova.scheduler.client.report [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 770.842414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg bf87039219024e95b236c9502e61bb95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.856957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf87039219024e95b236c9502e61bb95 [ 770.857765] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.276s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 770.858402] env[61594]: ERROR nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. 
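
The scheduler report above repeats the provider's unchanged inventory. As a rough aid to reading those numbers, the snippet below applies the usual placement capacity convention, (total - reserved) * allocation_ratio, to the values logged for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be; it is an illustrative calculation, not code from Nova or placement.

    # Values copied from the inventory report in the log above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print("%s: %g schedulable units" % (resource_class, usable))

    # Prints:
    #   VCPU: 192 schedulable units
    #   MEMORY_MB: 196078 schedulable units
    #   DISK_GB: 400 schedulable units

The min_unit, max_unit and step_size fields in the same report additionally bound what a single allocation may request (for example, at most 16 VCPUs per instance here).
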
[ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Traceback (most recent call last): [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.driver.spawn(context, instance, image_meta, [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] vm_ref = self.build_virtual_machine(instance, [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] vif_infos = vmwarevif.get_vif_info(self._session, [ 770.858402] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] for vif in network_info: [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self._sync_wrapper(fn, *args, **kwargs) [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.wait() [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self[:] = self._gt.wait() [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self._exit_event.wait() [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] result = hub.switch() [ 770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
770.858731] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return self.greenlet.switch() [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] result = function(*args, **kwargs) [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] return func(*args, **kwargs) [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise e [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] nwinfo = self.network_api.allocate_for_instance( [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] created_port_ids = self._update_ports_for_instance( [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] with excutils.save_and_reraise_exception(): [ 770.859074] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] self.force_reraise() [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise self.value [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] updated_port = self._update_port( [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] _ensure_no_port_binding_failure(port) [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] raise exception.PortBindingFailed(port_id=port['id']) [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] nova.exception.PortBindingFailed: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. [ 770.859405] env[61594]: ERROR nova.compute.manager [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] [ 770.859674] env[61594]: DEBUG nova.compute.utils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 770.861130] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Build of instance 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a was re-scheduled: Binding failed for port 7e591090-397c-446d-bc6c-60ba035cbddd, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 770.861551] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 770.861827] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquiring lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 770.861990] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Acquired lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.862170] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 770.862569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 5631253aee2f4552869e9d6e99f82de9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 770.872253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5631253aee2f4552869e9d6e99f82de9 [ 770.942297] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 
tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 771.177675] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.177675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 648d24dc41bf4f5ba7501e0e5de80f98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.193019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 648d24dc41bf4f5ba7501e0e5de80f98 [ 771.193019] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Releasing lock "refresh_cache-8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 771.193019] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 771.193019] env[61594]: DEBUG nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 771.193019] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 771.250083] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 771.250083] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 6f9ed88d9437424394340de9661dc254 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.260831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f9ed88d9437424394340de9661dc254 [ 771.262275] env[61594]: DEBUG nova.network.neutron [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.263835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 3f638796e0974b0a92dd528061c7ffb1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.281432] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f638796e0974b0a92dd528061c7ffb1 [ 771.281432] env[61594]: INFO nova.compute.manager [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] [instance: 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a] Took 0.09 seconds to deallocate network for instance. [ 771.284631] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg be555da136f54513bc37f98fd5215190 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.341231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be555da136f54513bc37f98fd5215190 [ 771.344063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 4d8112bff9e645d0b2a12b516813a815 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.402723] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d8112bff9e645d0b2a12b516813a815 [ 771.413601] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Successfully created port: a050d813-7f37-4cc8-bb22-d6596b4f310a {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.452268] env[61594]: INFO nova.scheduler.client.report [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Deleted allocations for instance 8b0ba692-56df-47d6-ad26-cbfb1c3bb00a [ 771.462437] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Expecting reply to msg 405e79b7792648b380bc0c985d6fb439 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 771.487057] env[61594]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 405e79b7792648b380bc0c985d6fb439 [ 771.488990] env[61594]: DEBUG oslo_concurrency.lockutils [None req-65aa97a9-3521-4ecd-b217-53071e696335 tempest-MigrationsAdminTest-1912133545 tempest-MigrationsAdminTest-1912133545-project-member] Lock "8b0ba692-56df-47d6-ad26-cbfb1c3bb00a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.323s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.719779] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Successfully created port: ef14754b-4ab7-4ab3-b64d-f4166bee7327 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 773.268550] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "3b261d28-a9b1-4551-8e39-2108e825aedc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.268811] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "3b261d28-a9b1-4551-8e39-2108e825aedc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.269569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 95392ecdb3b54c0cb8a84bca4bc755ae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.279934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95392ecdb3b54c0cb8a84bca4bc755ae [ 773.280164] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 773.281734] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 69b9a3b73f504190942e5c5e1524956e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.316685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 69b9a3b73f504190942e5c5e1524956e [ 773.337198] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.337458] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.338968] env[61594]: INFO nova.compute.claims [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 773.340562] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg b599cc4446b84cfdbb7a9fff98709bf2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.406123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b599cc4446b84cfdbb7a9fff98709bf2 [ 773.406824] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 9526b0a3d3c941c0a3bd4e02a4f7210d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.424437] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9526b0a3d3c941c0a3bd4e02a4f7210d [ 773.524477] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ea4999-5d99-4872-8180-19457fab42f0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.532575] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2261083b-eb9e-42d3-831c-9a73c19959ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.567552] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81dfb48-8a54-4bf7-9fc7-13de5210babd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.578885] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2b9197-6f0e-4d19-838d-fcba40b84363 
{{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.595088] env[61594]: DEBUG nova.compute.provider_tree [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.595613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 634f3eecc2bb4ac78413615a3e38f278 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.603961] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 634f3eecc2bb4ac78413615a3e38f278 [ 773.604945] env[61594]: DEBUG nova.scheduler.client.report [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 773.607242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c8f0bafd56be410a93a531bea1ed243c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.626796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8f0bafd56be410a93a531bea1ed243c [ 773.630019] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.290s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.630019] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 773.630490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg b1e295f094594b0aa085f62115caeb07 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.669128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1e295f094594b0aa085f62115caeb07 [ 773.669128] env[61594]: DEBUG nova.compute.utils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 773.669128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 81157d0482bd4c40b5d8d4bde27f7ca1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.670077] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 773.673636] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 773.684024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81157d0482bd4c40b5d8d4bde27f7ca1 [ 773.684024] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 773.684024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg f3322f19807042a581bf2a0833278cb7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.722537] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3322f19807042a581bf2a0833278cb7 [ 773.725259] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 35578973d10c450ca6410d27ae97187f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 773.755749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35578973d10c450ca6410d27ae97187f [ 773.757326] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 773.780032] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 773.780326] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 773.780489] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.780672] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 773.780868] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.780951] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 773.781647] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 773.781800] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 773.781978] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 773.782163] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 773.782340] env[61594]: DEBUG nova.virt.hardware [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 773.783706] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d61b9a-c9a0-4d8b-ad22-83553f0b71e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.793907] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e95ec35-a678-4081-9c5f-36921ccde63b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.051420] env[61594]: DEBUG nova.policy [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 776.513674] env[61594]: DEBUG nova.network.neutron [None 
req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Successfully created port: 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 777.068813] env[61594]: ERROR nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. [ 777.068813] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 777.068813] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 777.068813] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 777.068813] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.068813] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.068813] env[61594]: ERROR nova.compute.manager raise self.value [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 777.068813] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 777.068813] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.068813] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 777.069301] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.069301] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 777.069301] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. 
[ 777.069301] env[61594]: ERROR nova.compute.manager [ 777.069301] env[61594]: Traceback (most recent call last): [ 777.069301] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 777.069301] env[61594]: listener.cb(fileno) [ 777.069301] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 777.069301] env[61594]: result = function(*args, **kwargs) [ 777.069301] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 777.069301] env[61594]: return func(*args, **kwargs) [ 777.069301] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 777.069301] env[61594]: raise e [ 777.069301] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 777.069301] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 777.069301] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 777.069301] env[61594]: created_port_ids = self._update_ports_for_instance( [ 777.069301] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 777.069301] env[61594]: with excutils.save_and_reraise_exception(): [ 777.069301] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.069301] env[61594]: self.force_reraise() [ 777.069301] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.069301] env[61594]: raise self.value [ 777.069301] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 777.069301] env[61594]: updated_port = self._update_port( [ 777.069301] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.069301] env[61594]: _ensure_no_port_binding_failure(port) [ 777.069301] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.069301] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 777.070051] env[61594]: nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. [ 777.070051] env[61594]: Removing descriptor: 21 [ 777.072610] env[61594]: ERROR nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. 
[ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Traceback (most recent call last): [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] yield resources [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.driver.spawn(context, instance, image_meta, [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] vm_ref = self.build_virtual_machine(instance, [ 777.072610] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] vif_infos = vmwarevif.get_vif_info(self._session, [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] for vif in network_info: [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self._sync_wrapper(fn, *args, **kwargs) [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.wait() [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self[:] = self._gt.wait() [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self._exit_event.wait() [ 777.073667] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 777.073667] env[61594]: ERROR 
nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] result = hub.switch() [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self.greenlet.switch() [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] result = function(*args, **kwargs) [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return func(*args, **kwargs) [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise e [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] nwinfo = self.network_api.allocate_for_instance( [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] created_port_ids = self._update_ports_for_instance( [ 777.074024] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] with excutils.save_and_reraise_exception(): [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.force_reraise() [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise self.value [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] updated_port = self._update_port( [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 777.074373] 
env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] _ensure_no_port_binding_failure(port) [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise exception.PortBindingFailed(port_id=port['id']) [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. [ 777.074373] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] [ 777.074716] env[61594]: INFO nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Terminating instance [ 777.079030] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 777.079030] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquired lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.079030] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 777.079030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 0f6d9a8ff3b749e78d79622d96f6d6fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.089245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f6d9a8ff3b749e78d79622d96f6d6fa [ 777.164420] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "18087c89-5fb4-4a16-a3bb-d48712ce25f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.164691] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "18087c89-5fb4-4a16-a3bb-d48712ce25f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.165454] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a66bb1ed7cd94fa181f5289ffd1337b6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.174615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a66bb1ed7cd94fa181f5289ffd1337b6 [ 777.175285] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 777.176865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ecdeafa0e61543c78e4a95d1eabcb80d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.220660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ecdeafa0e61543c78e4a95d1eabcb80d [ 777.242074] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 777.242074] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 777.245649] env[61594]: INFO nova.compute.claims [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 777.245871] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6190f87e65384b64b8bf3b78cb1fea03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.286414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6190f87e65384b64b8bf3b78cb1fea03 [ 777.290585] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 582699862ff64c1aba3aa53cce515075 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.297526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 582699862ff64c1aba3aa53cce515075 [ 777.372169] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 
9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.428632] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad5523b-e43c-436a-a923-9c21539c2b30 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.438759] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c97652-62b5-4515-abae-2d5460c21674 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.480713] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7924cfd6-d75e-411c-ad1b-5addb725f588 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.489609] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599dd903-5261-43bc-a180-17a5c6cdb10e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.505579] env[61594]: DEBUG nova.compute.provider_tree [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.505954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ea77fd72eae049198eb82fd4b056e82c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.516925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea77fd72eae049198eb82fd4b056e82c [ 777.517913] env[61594]: DEBUG nova.scheduler.client.report [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 777.520567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6af1d32c11544a23a9c246ebcda21fdd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.537496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6af1d32c11544a23a9c246ebcda21fdd [ 777.538301] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.296s {{(pid=61594) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.538771] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 777.540691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 1480fcc70688473aba14aae7ebaa3508 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.583806] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1480fcc70688473aba14aae7ebaa3508 [ 777.585599] env[61594]: DEBUG nova.compute.utils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 777.586312] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ca76dc3562c149d2a17ff15649302f51 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.587422] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 777.587861] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 777.597126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca76dc3562c149d2a17ff15649302f51 [ 777.597639] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 777.599377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 75c7462a4a1b4f9f93ffd6e268faff54 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.633520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 75c7462a4a1b4f9f93ffd6e268faff54 [ 777.637594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8f448bc4a08d485487c21c2dd022f0eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 777.670133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f448bc4a08d485487c21c2dd022f0eb [ 777.671423] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 777.702182] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 777.702437] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 777.702593] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 777.702775] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 777.702923] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
777.703085] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 777.703505] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 777.703505] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 777.703617] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 777.703795] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 777.703929] env[61594]: DEBUG nova.virt.hardware [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 777.704815] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ce0987-c344-488a-909c-ed4ae5441603 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.716937] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05c4411-eb45-4eb8-852b-3d74f55c86d9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.996661] env[61594]: DEBUG nova.policy [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 778.342179] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 
9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.342739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg e687a1f745dc49c899d8e75bd69939ba in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.358590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e687a1f745dc49c899d8e75bd69939ba [ 778.359304] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Releasing lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 778.359771] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 778.360145] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 778.360694] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-144cea46-daa9-4d25-884c-eab76c86be9b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.370653] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4396d4db-f33d-4128-8e06-f0d64b06c03e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.394284] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f could not be found. [ 778.394560] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 778.394739] env[61594]: INFO nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Took 0.03 seconds to destroy the instance on the hypervisor. 
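The tracebacks recorded above all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure, which turns a failed Neutron port binding into nova.exception.PortBindingFailed and aborts the spawn (the instance is then terminated and its claim aborted, as in the entries that follow). Below is a minimal, hedged sketch of that failure mode only, not Nova's actual implementation; the binding:vif_type check and the helper names are assumptions inferred from the log text.

# Minimal sketch (assumptions noted; not Nova's code) of how a failed Neutron
# port binding becomes the PortBindingFailed error seen in the tracebacks above.

class PortBindingFailed(Exception):
    """Reproduces the message format recorded in the log."""

    def __init__(self, port_id: str) -> None:
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    # Assumption: Neutron reports a failed binding via the binding:vif_type field.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


if __name__ == "__main__":
    # Port id taken from the traceback above; the dict shape is illustrative only.
    ensure_no_port_binding_failure(
        {"id": "d5333c50-e5a1-43e0-9f2b-1e477c0113f7",
         "binding:vif_type": "binding_failed"})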
[ 778.395034] env[61594]: DEBUG oslo.service.loopingcall [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 778.395249] env[61594]: DEBUG nova.compute.manager [-] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 778.395356] env[61594]: DEBUG nova.network.neutron [-] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 778.447912] env[61594]: ERROR nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. [ 778.447912] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 778.447912] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 778.447912] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 778.447912] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.447912] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.447912] env[61594]: ERROR nova.compute.manager raise self.value [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 778.447912] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 778.447912] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.447912] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 778.448627] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.448627] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 778.448627] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. 
[ 778.448627] env[61594]: ERROR nova.compute.manager [ 778.448627] env[61594]: Traceback (most recent call last): [ 778.448627] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 778.448627] env[61594]: listener.cb(fileno) [ 778.448627] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 778.448627] env[61594]: result = function(*args, **kwargs) [ 778.448627] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 778.448627] env[61594]: return func(*args, **kwargs) [ 778.448627] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 778.448627] env[61594]: raise e [ 778.448627] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 778.448627] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 778.448627] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 778.448627] env[61594]: created_port_ids = self._update_ports_for_instance( [ 778.448627] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 778.448627] env[61594]: with excutils.save_and_reraise_exception(): [ 778.448627] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.448627] env[61594]: self.force_reraise() [ 778.448627] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.448627] env[61594]: raise self.value [ 778.448627] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 778.448627] env[61594]: updated_port = self._update_port( [ 778.448627] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.448627] env[61594]: _ensure_no_port_binding_failure(port) [ 778.449433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.449433] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 778.449433] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. [ 778.449433] env[61594]: Removing descriptor: 22 [ 778.449551] env[61594]: ERROR nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. 
[ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Traceback (most recent call last): [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] yield resources [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.driver.spawn(context, instance, image_meta, [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] vm_ref = self.build_virtual_machine(instance, [ 778.449551] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] for vif in network_info: [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self._sync_wrapper(fn, *args, **kwargs) [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.wait() [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self[:] = self._gt.wait() [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self._exit_event.wait() [ 778.449860] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 778.449860] env[61594]: ERROR 
nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] result = hub.switch() [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self.greenlet.switch() [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] result = function(*args, **kwargs) [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return func(*args, **kwargs) [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise e [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] nwinfo = self.network_api.allocate_for_instance( [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] created_port_ids = self._update_ports_for_instance( [ 778.452049] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] with excutils.save_and_reraise_exception(): [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.force_reraise() [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise self.value [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] updated_port = self._update_port( [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.452454] 
env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] _ensure_no_port_binding_failure(port) [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise exception.PortBindingFailed(port_id=port['id']) [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. [ 778.452454] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] [ 778.452881] env[61594]: INFO nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Terminating instance [ 778.452881] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.452881] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.452881] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 778.453373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 7766b1bdfece48c79d7e9730cfc7e51d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.466408] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7766b1bdfece48c79d7e9730cfc7e51d [ 778.555820] env[61594]: DEBUG nova.network.neutron [-] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 778.556403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ab66405e026348cdb88f66818968798e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.563341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ab66405e026348cdb88f66818968798e [ 778.563765] env[61594]: DEBUG nova.network.neutron [-] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 778.564194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 450dacc56a014a91a3828e19103492d4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.574009] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 450dacc56a014a91a3828e19103492d4 [ 778.574476] env[61594]: INFO nova.compute.manager [-] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Took 0.18 seconds to deallocate network for instance. [ 778.576510] env[61594]: DEBUG nova.compute.claims [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 778.576684] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.576896] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.579036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 5ab5204ef9ad4cd6af01239de29a50e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.584018] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 778.630894] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ab5204ef9ad4cd6af01239de29a50e3 [ 778.775058] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b07cd9e-b534-4cab-8efd-f72f4d44ca4d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.787285] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e906e120-f50b-410f-9dcf-5d1db3294424 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.830152] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74d3a1a-12d3-4c04-a434-d554046fbfcf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.835776] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0272e458-3dce-4b60-a45e-49aae5254db6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.852967] env[61594]: DEBUG nova.compute.provider_tree [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 778.853851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 92f854fc12714717a0dc04d98e66670b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.867439] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 92f854fc12714717a0dc04d98e66670b [ 778.868598] env[61594]: DEBUG nova.scheduler.client.report [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 778.874815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg b23f577eb46b40dbb73a3ed9648ec299 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.932582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b23f577eb46b40dbb73a3ed9648ec299 [ 778.932787] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 
tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.356s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.933446] env[61594]: ERROR nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Traceback (most recent call last): [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.driver.spawn(context, instance, image_meta, [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] vm_ref = self.build_virtual_machine(instance, [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] vif_infos = vmwarevif.get_vif_info(self._session, [ 778.933446] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] for vif in network_info: [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self._sync_wrapper(fn, *args, **kwargs) [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.wait() [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self[:] = self._gt.wait() [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self._exit_event.wait() [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] result = hub.switch() [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 778.933813] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return self.greenlet.switch() [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] result = function(*args, **kwargs) [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] return func(*args, **kwargs) [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise e [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] nwinfo = self.network_api.allocate_for_instance( [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] created_port_ids = self._update_ports_for_instance( [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] with excutils.save_and_reraise_exception(): [ 778.934186] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] self.force_reraise() [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise self.value [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 
9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] updated_port = self._update_port( [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] _ensure_no_port_binding_failure(port) [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] raise exception.PortBindingFailed(port_id=port['id']) [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] nova.exception.PortBindingFailed: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. [ 778.934535] env[61594]: ERROR nova.compute.manager [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] [ 778.934817] env[61594]: DEBUG nova.compute.utils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 778.940064] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Build of instance 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f was re-scheduled: Binding failed for port d5333c50-e5a1-43e0-9f2b-1e477c0113f7, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 778.940420] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 778.940817] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquiring lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 778.940817] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Acquired lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.940972] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 778.942747] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 8bcdf4cd330a4f6a8c081b675437a72f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 778.952813] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8bcdf4cd330a4f6a8c081b675437a72f [ 779.000308] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "1eebe96f-c03f-4069-99d0-ea2a50de6f35" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.000308] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "1eebe96f-c03f-4069-99d0-ea2a50de6f35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.000308] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg dd27ff9ae04e49af9e9f1f106e588600 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.011465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd27ff9ae04e49af9e9f1f106e588600 [ 779.012116] env[61594]: DEBUG 
nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 779.014020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 0da4843143064699af6d9263b66e74fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.018634] env[61594]: ERROR nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. [ 779.018634] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.018634] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.018634] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.018634] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.018634] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.018634] env[61594]: ERROR nova.compute.manager raise self.value [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.018634] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 779.018634] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.018634] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 779.019034] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.019034] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 779.019034] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. 
[ 779.019034] env[61594]: ERROR nova.compute.manager [ 779.019034] env[61594]: Traceback (most recent call last): [ 779.019034] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 779.019034] env[61594]: listener.cb(fileno) [ 779.019034] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 779.019034] env[61594]: result = function(*args, **kwargs) [ 779.019034] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.019034] env[61594]: return func(*args, **kwargs) [ 779.019034] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 779.019034] env[61594]: raise e [ 779.019034] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.019034] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 779.019034] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.019034] env[61594]: created_port_ids = self._update_ports_for_instance( [ 779.019034] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.019034] env[61594]: with excutils.save_and_reraise_exception(): [ 779.019034] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.019034] env[61594]: self.force_reraise() [ 779.019034] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.019034] env[61594]: raise self.value [ 779.019034] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.019034] env[61594]: updated_port = self._update_port( [ 779.019034] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.019034] env[61594]: _ensure_no_port_binding_failure(port) [ 779.019034] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.019034] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 779.019849] env[61594]: nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. [ 779.019849] env[61594]: Removing descriptor: 17 [ 779.019849] env[61594]: ERROR nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. 
[ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Traceback (most recent call last): [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] yield resources [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.driver.spawn(context, instance, image_meta, [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.019849] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] vm_ref = self.build_virtual_machine(instance, [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] for vif in network_info: [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self._sync_wrapper(fn, *args, **kwargs) [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.wait() [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self[:] = self._gt.wait() [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self._exit_event.wait() [ 779.020158] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 779.020594] env[61594]: ERROR 
nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] result = hub.switch() [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self.greenlet.switch() [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] result = function(*args, **kwargs) [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return func(*args, **kwargs) [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise e [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] nwinfo = self.network_api.allocate_for_instance( [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.020594] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] created_port_ids = self._update_ports_for_instance( [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] with excutils.save_and_reraise_exception(): [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.force_reraise() [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise self.value [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] updated_port = self._update_port( [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.022063] 
env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] _ensure_no_port_binding_failure(port) [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.022063] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise exception.PortBindingFailed(port_id=port['id']) [ 779.022524] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. [ 779.022524] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] [ 779.022524] env[61594]: INFO nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Terminating instance [ 779.022524] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquiring lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.022524] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquired lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.022524] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 779.022763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 4906b53512394f3486a9ae0bb03f6370 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.028233] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.031699] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4906b53512394f3486a9ae0bb03f6370 [ 779.064569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0da4843143064699af6d9263b66e74fe [ 779.096192] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.096192] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.097854] env[61594]: INFO nova.compute.claims [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 779.100127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9f591388c56a42a885cf5f9e60d1e603 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.150540] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.153416] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f591388c56a42a885cf5f9e60d1e603 [ 779.155550] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg f85d2d4cee40455ab4df9643934c819c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.170526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f85d2d4cee40455ab4df9643934c819c [ 779.261163] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.261788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 7b3970030d304c40b853f4c65471cda8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.276449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b3970030d304c40b853f4c65471cda8 [ 779.276840] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 779.277236] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 779.277441] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 779.277970] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-46ee4684-b4f7-473c-a0bf-c686cd0bbbc4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.291625] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25fa978-2509-4097-b1a7-39e05a918d4d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.318044] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6ee7d2a-dec8-4dad-b220-483e3313da31 could not be found. 
[ 779.318298] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 779.318475] env[61594]: INFO nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Took 0.04 seconds to destroy the instance on the hypervisor. [ 779.318715] env[61594]: DEBUG oslo.service.loopingcall [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 779.320226] env[61594]: DEBUG nova.compute.manager [-] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 779.320226] env[61594]: DEBUG nova.network.neutron [-] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 779.322170] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e071fa1d-322e-47f5-a41b-10a293bc1ed8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.328848] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374b8280-5f4a-401d-90af-0d94d7abb36e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.362226] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25942a2-8fa9-45ac-94a1-a27a8e0e8850 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.371487] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4baa07-a32f-4d4a-a446-3c8b51198441 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.384829] env[61594]: DEBUG nova.compute.provider_tree [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.385374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg ead8a14ca5b84d9a89f4988904c244e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.409326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ead8a14ca5b84d9a89f4988904c244e3 [ 779.410726] env[61594]: DEBUG nova.scheduler.client.report [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 
tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 779.419388] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9456260d4b19403bb61552fabe03b715 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.445858] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9456260d4b19403bb61552fabe03b715 [ 779.447517] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.448313] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 779.450463] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 60443904b204418dafd31e9b770212e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.531521] env[61594]: DEBUG nova.network.neutron [-] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.532245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 548d0e57b3d042ccb691ef04c6f9f3fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.544014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 548d0e57b3d042ccb691ef04c6f9f3fc [ 779.545503] env[61594]: DEBUG nova.network.neutron [-] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.545503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cfb76cade8d74f9692805828375a3629 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.563647] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfb76cade8d74f9692805828375a3629 [ 779.563647] env[61594]: INFO nova.compute.manager [-] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Took 0.24 seconds to deallocate network for instance. 
[ 779.564434] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60443904b204418dafd31e9b770212e2 [ 779.566973] env[61594]: DEBUG nova.compute.utils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 779.566973] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 61faa59f0e5d47d69f704d8800bda75f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.567762] env[61594]: DEBUG nova.compute.claims [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 779.568050] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 779.568321] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 779.570386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg b0a04dd44ed345489789db47d332bbd2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.572895] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 779.572895] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 779.597780] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61faa59f0e5d47d69f704d8800bda75f [ 779.598831] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 779.602398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9d51a20260e7400eb407d57ed782314e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.623455] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0a04dd44ed345489789db47d332bbd2 [ 779.634274] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d51a20260e7400eb407d57ed782314e [ 779.637943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 109724239c3044079bc346dc5b473647 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.679387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 109724239c3044079bc346dc5b473647 [ 779.680525] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 779.711315] env[61594]: ERROR nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. 
[ 779.711315] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.711315] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.711315] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.711315] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.711315] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.711315] env[61594]: ERROR nova.compute.manager raise self.value [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.711315] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 779.711315] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.711315] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 779.711837] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.711837] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 779.711837] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. 
[ 779.711837] env[61594]: ERROR nova.compute.manager [ 779.711837] env[61594]: Traceback (most recent call last): [ 779.711837] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 779.711837] env[61594]: listener.cb(fileno) [ 779.711837] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 779.711837] env[61594]: result = function(*args, **kwargs) [ 779.711837] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.711837] env[61594]: return func(*args, **kwargs) [ 779.711837] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 779.711837] env[61594]: raise e [ 779.711837] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.711837] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 779.711837] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.711837] env[61594]: created_port_ids = self._update_ports_for_instance( [ 779.711837] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.711837] env[61594]: with excutils.save_and_reraise_exception(): [ 779.711837] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.711837] env[61594]: self.force_reraise() [ 779.711837] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.711837] env[61594]: raise self.value [ 779.711837] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.711837] env[61594]: updated_port = self._update_port( [ 779.711837] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.711837] env[61594]: _ensure_no_port_binding_failure(port) [ 779.711837] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.711837] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 779.712712] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. [ 779.712712] env[61594]: Removing descriptor: 20 [ 779.715441] env[61594]: ERROR nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. 
[ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Traceback (most recent call last): [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] yield resources [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.driver.spawn(context, instance, image_meta, [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] vm_ref = self.build_virtual_machine(instance, [ 779.715441] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] for vif in network_info: [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self._sync_wrapper(fn, *args, **kwargs) [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.wait() [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self[:] = self._gt.wait() [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self._exit_event.wait() [ 779.715878] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 779.715878] env[61594]: ERROR 
nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] result = hub.switch() [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self.greenlet.switch() [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] result = function(*args, **kwargs) [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return func(*args, **kwargs) [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise e [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] nwinfo = self.network_api.allocate_for_instance( [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] created_port_ids = self._update_ports_for_instance( [ 779.716264] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] with excutils.save_and_reraise_exception(): [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.force_reraise() [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise self.value [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] updated_port = self._update_port( [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.716605] 
env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] _ensure_no_port_binding_failure(port) [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise exception.PortBindingFailed(port_id=port['id']) [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. [ 779.716605] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] [ 779.717655] env[61594]: INFO nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Terminating instance [ 779.718863] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.718863] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.718863] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 779.720331] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 954195a959fa4dfeb46ab570f0717619 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.725280] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False 
{{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 779.725542] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 779.725711] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 779.725899] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 779.726060] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 779.726213] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 779.726427] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 779.726739] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 779.726739] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 779.726909] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 779.727089] env[61594]: DEBUG nova.virt.hardware [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 779.728825] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045bc5d3-08b0-4363-882a-b81c3e531201 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.734691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 954195a959fa4dfeb46ab570f0717619 [ 779.741870] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60633814-fa1a-45a5-9c82-9e0176f43d1d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.791049] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0187ddf-ccb1-479b-bc7a-3e7b5cd7c486 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.800553] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a3fc71-b57f-4029-9b88-5bc3a9fed728 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.832940] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3737039-228e-4ed7-86b9-e97bd10c769e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.841182] env[61594]: DEBUG nova.policy [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d6b88b1f4394a63b32be00fc9dcc761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57787a709d744ea4a19a2cfb923d89d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 779.843720] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17339aac-cdb6-4c13-8eef-f9e0098e528a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.857014] env[61594]: DEBUG nova.compute.provider_tree [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 779.857605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 49bd56dc913d47d2a63ec55593a933e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.861971] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 779.867408] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49bd56dc913d47d2a63ec55593a933e2 [ 779.868419] env[61594]: DEBUG nova.scheduler.client.report [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 779.871084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 0beec3dac669419dadf56534958ad023 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.889344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0beec3dac669419dadf56534958ad023 [ 779.890331] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.890811] env[61594]: ERROR nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. 
[ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Traceback (most recent call last): [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.driver.spawn(context, instance, image_meta, [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self._vmops.spawn(context, instance, image_meta, injected_files, [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] vm_ref = self.build_virtual_machine(instance, [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] vif_infos = vmwarevif.get_vif_info(self._session, [ 779.890811] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] for vif in network_info: [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self._sync_wrapper(fn, *args, **kwargs) [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.wait() [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self[:] = self._gt.wait() [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self._exit_event.wait() [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] result = hub.switch() [ 779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
779.891276] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return self.greenlet.switch() [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] result = function(*args, **kwargs) [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] return func(*args, **kwargs) [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise e [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] nwinfo = self.network_api.allocate_for_instance( [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] created_port_ids = self._update_ports_for_instance( [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] with excutils.save_and_reraise_exception(): [ 779.891784] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] self.force_reraise() [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise self.value [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] updated_port = self._update_port( [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] _ensure_no_port_binding_failure(port) [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] raise exception.PortBindingFailed(port_id=port['id']) [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] nova.exception.PortBindingFailed: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. [ 779.892303] env[61594]: ERROR nova.compute.manager [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] [ 779.892730] env[61594]: DEBUG nova.compute.utils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 779.893362] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Build of instance b6ee7d2a-dec8-4dad-b220-483e3313da31 was re-scheduled: Binding failed for port 35fb7071-e5a9-4272-ae77-a82b6638f4d7, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 779.893535] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 779.893750] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 779.893902] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.894075] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 779.894861] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 96772ea76dc2422abd4bf2061ca2cdb7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 779.905972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96772ea76dc2422abd4bf2061ca2cdb7 [ 780.041944] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 
9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.042415] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 61f2e6fed1974114a2955dff6d57c89a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.051748] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61f2e6fed1974114a2955dff6d57c89a [ 780.052355] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Releasing lock "refresh_cache-9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.052557] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 780.052744] env[61594]: DEBUG nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 780.052911] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 780.102117] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.115084] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.115814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg ce0df537545f44c493a061a960ea48aa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.125670] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce0df537545f44c493a061a960ea48aa [ 780.126269] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Releasing lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.126660] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 780.126852] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 780.127376] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f699d22f-c2a2-47a8-b7bb-dd9e0e3b5cc9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.138672] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f085e448-9f04-4ce6-ad26-de03503a20fc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.152888] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.153535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 174d750d98c74fedb95eb98b17b7bcd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.161811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 174d750d98c74fedb95eb98b17b7bcd1 [ 780.162296] env[61594]: DEBUG nova.network.neutron [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.162813] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 1f5ea317790e4fad949b7c9d7e7303a8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.174218] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f5ea317790e4fad949b7c9d7e7303a8 [ 780.174920] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 017f274d-c305-4aff-977a-c8bb2827880f could not be found. [ 780.175221] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 780.175389] env[61594]: INFO nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Took 0.05 seconds to destroy the instance on the hypervisor. [ 780.175516] env[61594]: DEBUG oslo.service.loopingcall [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.176107] env[61594]: INFO nova.compute.manager [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] [instance: 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f] Took 0.12 seconds to deallocate network for instance. 
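
The WARNING/DEBUG pair above ("Instance does not exist on backend: InstanceNotFound" followed immediately by "Instance destroyed" and the timing lines) reflects a teardown that treats a missing backend VM as already destroyed so network cleanup can still proceed. A minimal sketch of that ordering, with hypothetical driver and network objects standing in for the real vmops and Neutron API:

    import logging
    import time

    logging.basicConfig(level=logging.INFO)
    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def shutdown_instance(driver, network_api, instance_uuid):
        """Hedged sketch of the teardown ordering visible in the log:
        destroy on the hypervisor (tolerating an already-missing VM),
        then deallocate the instance's network resources."""
        start = time.monotonic()
        try:
            driver.destroy(instance_uuid)
        except InstanceNotFound:
            # Matches the WARNING above: a VM whose spawn failed on port
            # binding has nothing to delete on the backend.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)
        LOG.info("Took %.2f seconds to destroy the instance on the hypervisor.",
                 time.monotonic() - start)

        start = time.monotonic()
        network_api.deallocate_for_instance(instance_uuid)
        LOG.info("Took %.2f seconds to deallocate network for instance.",
                 time.monotonic() - start)

    # Tiny stand-ins so the sketch runs end to end:
    class _MissingVMDriver:
        def destroy(self, uuid):
            raise InstanceNotFound(uuid)

    class _NoopNetworkAPI:
        def deallocate_for_instance(self, uuid):
            pass

    shutdown_instance(_MissingVMDriver(), _NoopNetworkAPI(),
                      '017f274d-c305-4aff-977a-c8bb2827880f')
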
[ 780.177951] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg e512a8fd4a7e4d02b8f0e70acdc03254 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.182015] env[61594]: DEBUG nova.compute.manager [-] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 780.182015] env[61594]: DEBUG nova.network.neutron [-] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 780.236062] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e512a8fd4a7e4d02b8f0e70acdc03254 [ 780.236062] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 961b8ac911ac4242981b4545e55e4f32 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.280385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 961b8ac911ac4242981b4545e55e4f32 [ 780.296071] env[61594]: DEBUG nova.network.neutron [-] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.297181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg eaf10185b46d437f8acb2b8d5c6b5cd8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.306700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eaf10185b46d437f8acb2b8d5c6b5cd8 [ 780.307151] env[61594]: DEBUG nova.network.neutron [-] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.307593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3ec0acc2116b4363b94adb2b13c6e94d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.313198] env[61594]: INFO nova.scheduler.client.report [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Deleted allocations for instance 9edf2808-0bc5-4951-9d0c-7806a4ec1b9f [ 780.319329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ec0acc2116b4363b94adb2b13c6e94d [ 780.319978] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Expecting reply to msg 6e8d66d41852432999762b4edd8b406b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.320766] env[61594]: INFO nova.compute.manager [-] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Took 0.14 seconds to deallocate network for instance. 
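
Nearly every RPC call in this log is bracketed by an "Expecting reply to msg <id> in queue reply_..." line and a matching "Received RPC response for msg <id>" line. A toy illustration of that correlation idea follows; it is not the oslo.messaging amqpdriver implementation (which adds timeouts, acknowledgements, and consumer threads), only the register-then-resolve-by-message-id pattern the log lines describe:

    import uuid
    from concurrent.futures import Future

    class ReplyWaiter:
        """Toy reply correlation keyed by message id (illustrative only)."""

        def __init__(self, reply_queue):
            self.reply_queue = reply_queue
            self._pending = {}

        def expect_reply(self):
            msg_id = uuid.uuid4().hex
            fut = Future()
            self._pending[msg_id] = fut
            print("Expecting reply to msg %s in queue %s" % (msg_id, self.reply_queue))
            return msg_id, fut

        def on_reply(self, msg_id, payload):
            print("Received RPC response for msg %s" % msg_id)
            self._pending.pop(msg_id).set_result(payload)

    waiter = ReplyWaiter('reply_c2db91d1f18a4ea8b8ecaf10b8005455')
    msg_id, fut = waiter.expect_reply()
    waiter.on_reply(msg_id, {'result': 'ok'})  # normally done by the reply consumer
    assert fut.result() == {'result': 'ok'}
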
[ 780.322842] env[61594]: DEBUG nova.compute.claims [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 780.323549] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.323861] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.325617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 0b5bd1023a55421bb7c338dd2ffcabdd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.335541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e8d66d41852432999762b4edd8b406b [ 780.336347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-126df6f1-5b4e-487d-b274-cbfae3442d95 tempest-ServerRescueNegativeTestJSON-1171749554 tempest-ServerRescueNegativeTestJSON-1171749554-project-member] Lock "9edf2808-0bc5-4951-9d0c-7806a4ec1b9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.634s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.436510] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Successfully created port: b4396922-ab16-4a9a-82f6-838138d4a038 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 780.451529] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b5bd1023a55421bb7c338dd2ffcabdd [ 780.608305] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4197bdf-cfc1-41e3-95e3-77729c8d2752 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.617284] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4752b3df-695d-4958-9961-2fcd4b5f3d26 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.660508] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a6e429-4606-4a5b-ad30-b539600435eb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.668022] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dcef66f-3020-4c0d-b6a2-d7fdedb5105d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.683025] env[61594]: DEBUG nova.compute.provider_tree [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.684326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 0bc3d26a13314d95a5c8960df67725a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.702611] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0bc3d26a13314d95a5c8960df67725a2 [ 780.702611] env[61594]: DEBUG nova.scheduler.client.report [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 780.705504] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 0b23bd359a384e0b9d1008452c87e3f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.717351] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b23bd359a384e0b9d1008452c87e3f9 [ 780.719812] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.394s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.719812] env[61594]: ERROR nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. 
[ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Traceback (most recent call last): [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.driver.spawn(context, instance, image_meta, [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 780.719812] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] vm_ref = self.build_virtual_machine(instance, [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] vif_infos = vmwarevif.get_vif_info(self._session, [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] for vif in network_info: [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self._sync_wrapper(fn, *args, **kwargs) [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.wait() [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self[:] = self._gt.wait() [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self._exit_event.wait() [ 780.720196] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] result = hub.switch() [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return self.greenlet.switch() [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] result = function(*args, **kwargs) [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] return func(*args, **kwargs) [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise e [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] nwinfo = self.network_api.allocate_for_instance( [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 780.720589] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] created_port_ids = self._update_ports_for_instance( [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] with excutils.save_and_reraise_exception(): [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] self.force_reraise() [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise self.value [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] updated_port = self._update_port( [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] _ensure_no_port_binding_failure(port) [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 780.720904] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] raise exception.PortBindingFailed(port_id=port['id']) [ 780.721223] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] nova.exception.PortBindingFailed: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. [ 780.721223] env[61594]: ERROR nova.compute.manager [instance: 017f274d-c305-4aff-977a-c8bb2827880f] [ 780.721223] env[61594]: DEBUG nova.compute.utils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 780.727095] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Build of instance 017f274d-c305-4aff-977a-c8bb2827880f was re-scheduled: Binding failed for port a050d813-7f37-4cc8-bb22-d6596b4f310a, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 780.727095] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 780.727095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquiring lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.727095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Acquired lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.727929] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 780.727929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg df862f764cb54b82848db3167f292104 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.735715] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df862f764cb54b82848db3167f292104 [ 780.855928] env[61594]: DEBUG nova.network.neutron [None 
req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 780.856428] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 2b3d8d49c339415aa3b370dae32b531c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 780.870870] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b3d8d49c339415aa3b370dae32b531c [ 780.871742] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 780.872026] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 780.872307] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 780.872843] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95e86e66-b253-45ec-9679-7938aeb82bad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.883836] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9ce95b-1434-4ca9-bd04-7884abfcf783 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.914060] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6905ca7f-445e-45f8-8558-b119560a4216 could not be found. [ 780.914060] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 780.914060] env[61594]: INFO nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 780.914060] env[61594]: DEBUG oslo.service.loopingcall [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 780.914572] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 780.916540] env[61594]: DEBUG nova.compute.manager [-] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 780.916926] env[61594]: DEBUG nova.network.neutron [-] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.005021] env[61594]: DEBUG nova.network.neutron [-] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.005021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2b3fc4ecc3a44d2688bd00d8085a3727 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.013250] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b3fc4ecc3a44d2688bd00d8085a3727 [ 781.013250] env[61594]: DEBUG nova.network.neutron [-] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.013250] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ff6197d6040e498b8c551019fad57672 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.025670] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ff6197d6040e498b8c551019fad57672 [ 781.026255] env[61594]: INFO nova.compute.manager [-] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Took 0.11 seconds to deallocate network for instance. 
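Every PortBindingFailed traceback above bottoms out in _ensure_no_port_binding_failure raising the exception for the port Neutron could not bind. A minimal, self-contained sketch of that check follows; the binding:vif_type == 'binding_failed' test is an assumption about how Neutron flags such a port, and only the final raise with port_id is confirmed by the log itself.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed (illustrative only)."""
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        # Assumption: a port Neutron failed to bind carries
        # binding:vif_type == 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # The port from the first traceback above would be rejected like this:
    try:
        _ensure_no_port_binding_failure({
            'id': 'a050d813-7f37-4cc8-bb22-d6596b4f310a',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)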
[ 781.028813] env[61594]: DEBUG nova.compute.claims [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 781.029259] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.029497] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.034937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 28f2ac15acfe4d96b053f01da3065a4b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.082248] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28f2ac15acfe4d96b053f01da3065a4b [ 781.174571] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.174571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 55ab363bafcc4d7598a6a3d60c8bd10d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.191580] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55ab363bafcc4d7598a6a3d60c8bd10d [ 781.192869] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-b6ee7d2a-dec8-4dad-b220-483e3313da31" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.192869] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 781.192869] env[61594]: DEBUG nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 781.192981] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.243685] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d1d04d0-0a1d-47af-8273-809dd1f826c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.252366] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28786c4-4a11-4b33-943e-104985fb71c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.295403] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.295871] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 029ec793ceeb4c1f817144d71962098b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.298685] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfcda84-3a99-4b12-94c2-5d7e5ac73c0c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.308036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 029ec793ceeb4c1f817144d71962098b [ 781.309171] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-932b3bbc-9b14-43b5-8173-0c0c53950aa4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.314420] env[61594]: DEBUG nova.network.neutron [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.314420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg a763aef1de324e9e887ea8bf099ef214 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.327424] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a763aef1de324e9e887ea8bf099ef214 [ 781.327563] env[61594]: DEBUG nova.compute.provider_tree [None 
req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.328074] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg a240b9ca491942f4bb206b968277ee74 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.329195] env[61594]: INFO nova.compute.manager [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: b6ee7d2a-dec8-4dad-b220-483e3313da31] Took 0.14 seconds to deallocate network for instance. [ 781.330979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 8e8d2ffd78674ec98c2d40d85b96d9fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.340024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a240b9ca491942f4bb206b968277ee74 [ 781.341125] env[61594]: DEBUG nova.scheduler.client.report [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 781.344334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 82986e1b4e0c4f28a68085143f98e150 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.366030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82986e1b4e0c4f28a68085143f98e150 [ 781.366030] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.334s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.366030] env[61594]: ERROR nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. 
[ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Traceback (most recent call last): [ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.driver.spawn(context, instance, image_meta, [ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.366030] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] vm_ref = self.build_virtual_machine(instance, [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] for vif in network_info: [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self._sync_wrapper(fn, *args, **kwargs) [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.wait() [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self[:] = self._gt.wait() [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 781.366608] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self._exit_event.wait() [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] result = hub.switch() [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return self.greenlet.switch() [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] result = function(*args, **kwargs) [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] return func(*args, **kwargs) [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise e [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] nwinfo = self.network_api.allocate_for_instance( [ 781.366951] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] created_port_ids = self._update_ports_for_instance( [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] with excutils.save_and_reraise_exception(): [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] self.force_reraise() [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise self.value [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] updated_port = self._update_port( [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] _ensure_no_port_binding_failure(port) [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 781.367339] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] raise exception.PortBindingFailed(port_id=port['id']) [ 781.367748] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] nova.exception.PortBindingFailed: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. [ 781.367748] env[61594]: ERROR nova.compute.manager [instance: 6905ca7f-445e-45f8-8558-b119560a4216] [ 781.367748] env[61594]: DEBUG nova.compute.utils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 781.372184] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Build of instance 6905ca7f-445e-45f8-8558-b119560a4216 was re-scheduled: Binding failed for port ef14754b-4ab7-4ab3-b64d-f4166bee7327, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 781.372184] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 781.372184] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquiring lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.372184] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Acquired lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 781.372426] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 781.372749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg ad58ee83ff304139a985d16731930088 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.389983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad58ee83ff304139a985d16731930088 [ 781.409628] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg 8e8d2ffd78674ec98c2d40d85b96d9fe [ 781.416180] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 5d73e0b884bc47cc970727b0d433581d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.476629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d73e0b884bc47cc970727b0d433581d [ 781.523497] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.538613] env[61594]: INFO nova.scheduler.client.report [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Deleted allocations for instance b6ee7d2a-dec8-4dad-b220-483e3313da31 [ 781.550900] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 48c6e6828648438d8f7da794f662e859 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.585904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48c6e6828648438d8f7da794f662e859 [ 781.585904] env[61594]: DEBUG oslo_concurrency.lockutils [None req-093ecb04-435b-4dfd-bce5-0d61b635343c tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "b6ee7d2a-dec8-4dad-b220-483e3313da31" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.105s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.845210] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.845756] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 4acd2675fcaf442d875890b2b97067c2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.856395] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4acd2675fcaf442d875890b2b97067c2 [ 781.857057] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Releasing lock "refresh_cache-017f274d-c305-4aff-977a-c8bb2827880f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 781.857276] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 781.857467] env[61594]: DEBUG nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 781.857638] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 781.927018] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.927018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 60f24c741f8f4d3aa860d82a17392972 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.941644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60f24c741f8f4d3aa860d82a17392972 [ 781.942307] env[61594]: DEBUG nova.network.neutron [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.943218] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 0106a3f1f1894af486ab15ef4a6991d0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 781.957186] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0106a3f1f1894af486ab15ef4a6991d0 [ 781.957186] env[61594]: INFO nova.compute.manager [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] [instance: 017f274d-c305-4aff-977a-c8bb2827880f] Took 0.10 seconds to deallocate network for instance. 
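The DEBUG entries above trace the cleanup path for the re-scheduled build: Nova attempts to unplug VIFs, the vmwareapi driver does not implement unplug_vifs, and the instance's networks are then deallocated. A sketch of that control flow, inferred from the log messages rather than copied from nova/compute/manager.py:

    import logging

    LOG = logging.getLogger(__name__)

    def cleanup_allocated_networks(driver, network_api, context, instance,
                                   network_info=None, requested_networks=None):
        # Inferred flow: try to unplug, fall back when the driver lacks the
        # hook, then always deallocate the instance's networks.
        try:
            LOG.debug("Unplugging VIFs for instance")
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            # This branch produces the "does not provide unplug_vifs"
            # message seen in the log above.
            LOG.debug("Virt driver does not provide unplug_vifs method, so "
                      "it is not possible determine if VIFs should be "
                      "unplugged.")
        LOG.debug("Deallocating network for instance")
        network_api.deallocate_for_instance(
            context, instance, requested_networks=requested_networks)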
[ 781.958791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg 9ac978d7df5b47d08b455674602cfe03 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.023030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ac978d7df5b47d08b455674602cfe03 [ 782.025677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg cdb988f2dd1f4e07bec398302cb77861 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.068722] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cdb988f2dd1f4e07bec398302cb77861 [ 782.105111] env[61594]: INFO nova.scheduler.client.report [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Deleted allocations for instance 017f274d-c305-4aff-977a-c8bb2827880f [ 782.116027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Expecting reply to msg ae1a5d4454d04379ac098c48267bc623 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.137789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae1a5d4454d04379ac098c48267bc623 [ 782.138498] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bf15dfa0-9f6b-413d-acac-daef7b069f4f tempest-AttachInterfacesV270Test-749677004 tempest-AttachInterfacesV270Test-749677004-project-member] Lock "017f274d-c305-4aff-977a-c8bb2827880f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.041s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 782.419173] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.419173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 208e86e9657744e291db6f50ef2697ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.430739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 208e86e9657744e291db6f50ef2697ed [ 782.432159] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Releasing lock "refresh_cache-6905ca7f-445e-45f8-8558-b119560a4216" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.432159] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] 
Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 782.432159] env[61594]: DEBUG nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 782.432159] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 782.564721] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Successfully created port: bd590870-c0eb-4002-80ae-72ba3ded7e91 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.567359] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.569118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 5b9c3269b2044442a764ef7eba2a0209 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.578986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b9c3269b2044442a764ef7eba2a0209 [ 782.579589] env[61594]: DEBUG nova.network.neutron [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 782.580066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 6561741450db4b94b6ca74beeb1b4044 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.602610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6561741450db4b94b6ca74beeb1b4044 [ 782.603102] env[61594]: INFO nova.compute.manager [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] [instance: 6905ca7f-445e-45f8-8558-b119560a4216] Took 0.17 seconds to deallocate network for instance. 
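The inventory reported a few entries above for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be can be turned into schedulable capacity with the usual Placement arithmetic, capacity = (total - reserved) * allocation_ratio, with max_unit bounding what a single instance may consume; the formula is standard Placement behaviour, not something stated in the log.

    # Capacity arithmetic for the provider inventory reported above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable {capacity:g}, per-instance max_unit {inv['max_unit']}")
    # VCPU: schedulable 192, per-instance max_unit 16
    # MEMORY_MB: schedulable 196078, per-instance max_unit 65530
    # DISK_GB: schedulable 400, per-instance max_unit 139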
[ 782.604773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 7ecbaf1200554d08980db9ac9082f72b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.652773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ecbaf1200554d08980db9ac9082f72b [ 782.655532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 1930ea22b4c5488084d37cd2eca28db4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.696552] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1930ea22b4c5488084d37cd2eca28db4 [ 782.728663] env[61594]: INFO nova.scheduler.client.report [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Deleted allocations for instance 6905ca7f-445e-45f8-8558-b119560a4216 [ 782.738028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Expecting reply to msg 638f4e6d980a4318a5813b010270e750 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 782.755192] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 638f4e6d980a4318a5813b010270e750 [ 782.755783] env[61594]: DEBUG oslo_concurrency.lockutils [None req-8bbbdfac-e3d1-4b30-a07e-ae5e9dad2824 tempest-ServerDiskConfigTestJSON-779666606 tempest-ServerDiskConfigTestJSON-779666606-project-member] Lock "6905ca7f-445e-45f8-8558-b119560a4216" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.404s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 785.964846] env[61594]: ERROR nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. 
[ 785.964846] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 785.964846] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 785.964846] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 785.964846] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.964846] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.964846] env[61594]: ERROR nova.compute.manager raise self.value [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 785.964846] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 785.964846] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.964846] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 785.965523] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.965523] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 785.965523] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. 
[ 785.965523] env[61594]: ERROR nova.compute.manager [ 785.965523] env[61594]: Traceback (most recent call last): [ 785.965523] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 785.965523] env[61594]: listener.cb(fileno) [ 785.965523] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 785.965523] env[61594]: result = function(*args, **kwargs) [ 785.965523] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 785.965523] env[61594]: return func(*args, **kwargs) [ 785.965523] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 785.965523] env[61594]: raise e [ 785.965523] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 785.965523] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 785.965523] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 785.965523] env[61594]: created_port_ids = self._update_ports_for_instance( [ 785.965523] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 785.965523] env[61594]: with excutils.save_and_reraise_exception(): [ 785.965523] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.965523] env[61594]: self.force_reraise() [ 785.965523] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.965523] env[61594]: raise self.value [ 785.965523] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 785.965523] env[61594]: updated_port = self._update_port( [ 785.965523] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.965523] env[61594]: _ensure_no_port_binding_failure(port) [ 785.965523] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.965523] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 785.966296] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. [ 785.966296] env[61594]: Removing descriptor: 19 [ 785.966296] env[61594]: ERROR nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. 
[ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Traceback (most recent call last): [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] yield resources [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.driver.spawn(context, instance, image_meta, [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 785.966296] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] vm_ref = self.build_virtual_machine(instance, [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] vif_infos = vmwarevif.get_vif_info(self._session, [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] for vif in network_info: [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self._sync_wrapper(fn, *args, **kwargs) [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.wait() [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self[:] = self._gt.wait() [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self._exit_event.wait() [ 785.966611] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 785.966930] env[61594]: ERROR 
nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] result = hub.switch() [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self.greenlet.switch() [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] result = function(*args, **kwargs) [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return func(*args, **kwargs) [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise e [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] nwinfo = self.network_api.allocate_for_instance( [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 785.966930] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] created_port_ids = self._update_ports_for_instance( [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] with excutils.save_and_reraise_exception(): [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.force_reraise() [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise self.value [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] updated_port = self._update_port( [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 785.967393] 
env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] _ensure_no_port_binding_failure(port) [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 785.967393] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise exception.PortBindingFailed(port_id=port['id']) [ 785.967723] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. [ 785.967723] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] [ 785.967723] env[61594]: INFO nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Terminating instance [ 785.972064] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 785.972064] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.972064] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 785.972064] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8b90d10084814abebf369008a008ab3b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 785.982483] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b90d10084814abebf369008a008ab3b [ 786.052469] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.454891] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.454891] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 80ae9854b5fe4de8ab7a5eb5162d9042 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.475119] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80ae9854b5fe4de8ab7a5eb5162d9042 [ 786.475949] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.476307] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 786.476524] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 786.477093] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95738177-a1b4-4ea1-82e4-d3cbbbb6ebf5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.494187] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2cbaba-5f87-41b7-b147-ae4c7667aa00 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.513445] env[61594]: DEBUG nova.compute.manager [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Received event network-changed-120b7e4f-ee57-4bf0-a4a9-ecf2ab820872 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 786.513659] env[61594]: DEBUG nova.compute.manager [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Refreshing instance network info cache due to event network-changed-120b7e4f-ee57-4bf0-a4a9-ecf2ab820872. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 786.513979] env[61594]: DEBUG oslo_concurrency.lockutils [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] Acquiring lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.514204] env[61594]: DEBUG oslo_concurrency.lockutils [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] Acquired lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.514381] env[61594]: DEBUG nova.network.neutron [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Refreshing network info cache for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 786.515389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] Expecting reply to msg e5ec690a33a840efbba17b6ecfa5b56c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.524335] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3b261d28-a9b1-4551-8e39-2108e825aedc could not be found. [ 786.524574] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 786.524757] env[61594]: INFO nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Took 0.05 seconds to destroy the instance on the hypervisor. [ 786.525014] env[61594]: DEBUG oslo.service.loopingcall [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 786.525248] env[61594]: DEBUG nova.compute.manager [-] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 786.525343] env[61594]: DEBUG nova.network.neutron [-] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 786.529844] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5ec690a33a840efbba17b6ecfa5b56c [ 786.605202] env[61594]: DEBUG nova.network.neutron [-] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.605668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a2147c99e02746d4b13b0590a10da1a3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.612816] env[61594]: DEBUG nova.network.neutron [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 786.614844] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2147c99e02746d4b13b0590a10da1a3 [ 786.617321] env[61594]: DEBUG nova.network.neutron [-] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.617733] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 297a8e6f3f0b471a8d61f63a72b70657 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.632879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 297a8e6f3f0b471a8d61f63a72b70657 [ 786.633756] env[61594]: INFO nova.compute.manager [-] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Took 0.11 seconds to deallocate network for instance. [ 786.635905] env[61594]: DEBUG nova.compute.claims [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 786.636092] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.636347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.639124] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 11202926a9664717a875b3d4c79e517f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.712958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11202926a9664717a875b3d4c79e517f [ 786.848500] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77d38eea-4604-46f8-998c-a259db4ed6a4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.863094] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eb9fee-dcbb-4962-b99c-8aeb8348097c {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.872021] env[61594]: DEBUG nova.network.neutron [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.872588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] Expecting reply to msg c8b70e9dca4344b3b06d8c3f664ec974 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.910756] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8b70e9dca4344b3b06d8c3f664ec974 [ 786.911835] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea25825-e45b-446b-9566-98be003494ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.915761] env[61594]: DEBUG oslo_concurrency.lockutils [req-4f3ecd23-3cf1-4913-aa7c-906adfe6de60 req-bcc92727-9ee2-4c14-ab67-59bfa2b5d1d0 service nova] Releasing lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 786.925885] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdaadf2f-6a46-494c-8aaf-8f0dc4b5dd29 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.942159] env[61594]: DEBUG nova.compute.provider_tree [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.942691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ac396a98c5f34102ad52a035f6ce362b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.961134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac396a98c5f34102ad52a035f6ce362b [ 786.962365] env[61594]: DEBUG nova.scheduler.client.report [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 786.966212] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg bffe96dacf804a3da97d0f1296d3e9ef in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.980329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bffe96dacf804a3da97d0f1296d3e9ef [ 786.981951] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.345s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.982211] env[61594]: ERROR nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Traceback (most recent call last): [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.driver.spawn(context, instance, image_meta, [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] vm_ref = self.build_virtual_machine(instance, [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.982211] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] for vif in network_info: [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self._sync_wrapper(fn, *args, **kwargs) [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.wait() [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File 
"/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self[:] = self._gt.wait() [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self._exit_event.wait() [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] result = hub.switch() [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 786.982925] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return self.greenlet.switch() [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] result = function(*args, **kwargs) [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] return func(*args, **kwargs) [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise e [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] nwinfo = self.network_api.allocate_for_instance( [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] created_port_ids = self._update_ports_for_instance( [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] with excutils.save_and_reraise_exception(): [ 786.983964] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] self.force_reraise() [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 
3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise self.value [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] updated_port = self._update_port( [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] _ensure_no_port_binding_failure(port) [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] raise exception.PortBindingFailed(port_id=port['id']) [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] nova.exception.PortBindingFailed: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. [ 786.984933] env[61594]: ERROR nova.compute.manager [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] [ 786.985365] env[61594]: DEBUG nova.compute.utils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.985365] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Build of instance 3b261d28-a9b1-4551-8e39-2108e825aedc was re-scheduled: Binding failed for port 120b7e4f-ee57-4bf0-a4a9-ecf2ab820872, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 786.985365] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 786.985365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.985556] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.985612] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.986045] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a151636ba3cb4ed2b9c246b707ac77b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 786.998288] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a151636ba3cb4ed2b9c246b707ac77b1 [ 787.185181] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "28bcec42-4fb0-4ef1-b882-6224fdbcec16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.185335] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "28bcec42-4fb0-4ef1-b882-6224fdbcec16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.186012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg d57a4a33d4ab40708295550d9ae53e76 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.199951] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d57a4a33d4ab40708295550d9ae53e76 [ 787.200687] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 
tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 787.203064] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg a81a32a7738345f4bb3aa41e4642c114 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.245655] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a81a32a7738345f4bb3aa41e4642c114 [ 787.267082] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.267446] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 787.269211] env[61594]: INFO nova.compute.claims [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.271340] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg df137ee684574dcd83b377766a9825bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.281293] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.315924] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df137ee684574dcd83b377766a9825bd [ 787.316221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg a3f403347c154a05ab103eac09a11cb6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.331530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3f403347c154a05ab103eac09a11cb6 [ 787.445654] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3026c65b-35e2-4509-8632-0b11f0c9ce1f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.461161] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af01ab6-63a1-46d6-b33b-dfb76014ccb3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.503510] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d3f87f-e04e-453c-b765-061c37be0f97 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.513151] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebd96e8-f7ec-4dc2-a992-87237e45d4c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.535075] env[61594]: DEBUG nova.compute.provider_tree [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 787.535603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 89c36b0efca34a7da6978cc5e9da4b9a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.550751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89c36b0efca34a7da6978cc5e9da4b9a [ 787.553889] env[61594]: DEBUG nova.scheduler.client.report [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 787.558864] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 
767c3d006fb14aa7bf08d0af644a18bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.577564] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 767c3d006fb14aa7bf08d0af644a18bb [ 787.577927] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 787.578432] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 787.580071] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 414bf75d4566465d966413de37c94e71 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.643822] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 414bf75d4566465d966413de37c94e71 [ 787.645320] env[61594]: DEBUG nova.compute.utils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 787.645920] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg ee8c71cb2217401aba0fbc09af2a2165 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.649935] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 787.661650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee8c71cb2217401aba0fbc09af2a2165 [ 787.662375] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 787.669178] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 29472875e93145b38987543e754d5be3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.729594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29472875e93145b38987543e754d5be3 [ 787.735020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg bc7b4809550a42d498e7509552c64d3c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.774871] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc7b4809550a42d498e7509552c64d3c [ 787.776529] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 787.804178] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 787.804667] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 787.804944] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.805259] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 787.805530] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 787.805788] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 787.806131] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 787.806403] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 787.806685] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 787.806960] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 787.807272] env[61594]: DEBUG nova.virt.hardware [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 787.808279] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3c2c0a-60ad-46f8-a34d-4816b571720a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.818190] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4172d063-e888-412c-abd5-dd44fe3a2f42 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.824492] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.825397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 254a8d588e0a4f0792d38ccf64810134 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.837387] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 
tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 787.843248] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Creating folder: Project (2a98ff03844e4dbf912f40034f907789). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 787.844127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 254a8d588e0a4f0792d38ccf64810134 [ 787.844629] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34e504ce-4ca4-4b3d-ab2f-13d2fae7a5b3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.846748] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-3b261d28-a9b1-4551-8e39-2108e825aedc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.847096] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 787.847390] env[61594]: DEBUG nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 787.847683] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.861568] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Created folder: Project (2a98ff03844e4dbf912f40034f907789) in parent group-v277030. [ 787.861568] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Creating folder: Instances. Parent ref: group-v277044. 
{{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 787.861568] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-676a92a3-d598-430a-b2c9-1e9a3c688c7b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.874030] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Created folder: Instances in parent group-v277044. [ 787.874030] env[61594]: DEBUG oslo.service.loopingcall [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 787.874030] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 787.874030] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02289801-cfb8-44f9-81b6-d549657f235f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.904052] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 787.904052] env[61594]: value = "task-1291402" [ 787.904052] env[61594]: _type = "Task" [ 787.904052] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.911693] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291402, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.934764] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.936853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg afe94f053d9c40a2bc771dc35c1bb2d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.947287] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afe94f053d9c40a2bc771dc35c1bb2d8 [ 787.948331] env[61594]: DEBUG nova.network.neutron [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.948865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8a7dfdd0707f4c59833d795a015bed4c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.957787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a7dfdd0707f4c59833d795a015bed4c [ 787.958493] env[61594]: INFO nova.compute.manager [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 3b261d28-a9b1-4551-8e39-2108e825aedc] Took 0.11 seconds to deallocate network for instance. [ 787.963344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ad9657747aff4d8abeebe5032e8ce3f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 787.997063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ad9657747aff4d8abeebe5032e8ce3f4 [ 787.999873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 20bb68b6456a4406b141a3b193b600e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 788.038359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20bb68b6456a4406b141a3b193b600e4 [ 788.074053] env[61594]: INFO nova.scheduler.client.report [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance 3b261d28-a9b1-4551-8e39-2108e825aedc [ 788.082204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ba42e5debb7541acb1ea09f9b866117b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 788.108220] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba42e5debb7541acb1ea09f9b866117b [ 788.108619] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a53fd6b-ce3a-4a6f-bbc3-6c1916918b1a tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "3b261d28-a9b1-4551-8e39-2108e825aedc" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.840s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 788.445228] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291402, 'name': CreateVM_Task, 'duration_secs': 0.324082} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.445552] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 788.445888] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 788.446087] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.446473] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 788.446908] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d394d378-7ad1-4546-a337-d4f90925f098 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.453454] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for the task: (returnval){ [ 788.453454] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52133672-df87-ce9a-a3e4-fc56b4f2cfb0" [ 788.453454] env[61594]: _type = "Task" [ 788.453454] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.463335] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52133672-df87-ce9a-a3e4-fc56b4f2cfb0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.971219] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 788.971556] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.971803] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.928383] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "11ca01d2-83e3-42c4-bef5-87459148e858" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.928818] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "11ca01d2-83e3-42c4-bef5-87459148e858" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.929222] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 96cf01cdffb846c3899d5170782fa733 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 790.942752] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96cf01cdffb846c3899d5170782fa733 [ 790.943406] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 790.945710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 6413c56992384c7091b31cdcec515d7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 790.994137] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6413c56992384c7091b31cdcec515d7f [ 791.018784] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.019077] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 791.024564] env[61594]: INFO nova.compute.claims [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 791.026803] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 50c8712b731646df9ec901914659db08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.072852] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50c8712b731646df9ec901914659db08 [ 791.074943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 39238b2825ea4f518f406d65f6e7e9d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.083276] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39238b2825ea4f518f406d65f6e7e9d7 [ 791.182832] env[61594]: ERROR nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. 
[ 791.182832] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 791.182832] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.182832] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.182832] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.182832] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.182832] env[61594]: ERROR nova.compute.manager raise self.value [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.182832] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 791.182832] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.182832] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 791.183317] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.183317] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 791.183317] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. 
[ 791.183317] env[61594]: ERROR nova.compute.manager [ 791.183317] env[61594]: Traceback (most recent call last): [ 791.183317] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 791.183317] env[61594]: listener.cb(fileno) [ 791.183317] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 791.183317] env[61594]: result = function(*args, **kwargs) [ 791.183317] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.183317] env[61594]: return func(*args, **kwargs) [ 791.183317] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 791.183317] env[61594]: raise e [ 791.183317] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 791.183317] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 791.183317] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.183317] env[61594]: created_port_ids = self._update_ports_for_instance( [ 791.183317] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.183317] env[61594]: with excutils.save_and_reraise_exception(): [ 791.183317] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.183317] env[61594]: self.force_reraise() [ 791.183317] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.183317] env[61594]: raise self.value [ 791.183317] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.183317] env[61594]: updated_port = self._update_port( [ 791.183317] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.183317] env[61594]: _ensure_no_port_binding_failure(port) [ 791.183317] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.183317] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 791.184059] env[61594]: nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. [ 791.184059] env[61594]: Removing descriptor: 24 [ 791.184059] env[61594]: ERROR nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. 
[ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Traceback (most recent call last): [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] yield resources [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.driver.spawn(context, instance, image_meta, [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.184059] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] vm_ref = self.build_virtual_machine(instance, [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] for vif in network_info: [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self._sync_wrapper(fn, *args, **kwargs) [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.wait() [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self[:] = self._gt.wait() [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self._exit_event.wait() [ 791.184376] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 791.184700] env[61594]: ERROR 
nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] result = hub.switch() [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self.greenlet.switch() [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] result = function(*args, **kwargs) [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return func(*args, **kwargs) [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise e [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] nwinfo = self.network_api.allocate_for_instance( [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.184700] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] created_port_ids = self._update_ports_for_instance( [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] with excutils.save_and_reraise_exception(): [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.force_reraise() [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise self.value [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] updated_port = self._update_port( [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.185037] 
env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] _ensure_no_port_binding_failure(port) [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.185037] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise exception.PortBindingFailed(port_id=port['id']) [ 791.185414] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. [ 791.185414] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] [ 791.185414] env[61594]: INFO nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Terminating instance [ 791.186690] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 791.186690] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.186802] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 791.187319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c35c1e5d0881441cb75f5ee4fec781ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.197253] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-367f2a7c-1603-4d9a-829a-3b07267583a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.200785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c35c1e5d0881441cb75f5ee4fec781ea [ 791.210837] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81b6a72-4083-4c5f-a49e-6af6c69cd9bd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.244149] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c674af31-8018-47d6-92ff-0c12b48cda14 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.252854] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-442759d6-e05f-4f04-b238-e4b65aecb989 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.267748] env[61594]: DEBUG nova.compute.provider_tree [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 791.268298] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg c10a4a47fef943be84fbdfcc0c1bb273 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.276852] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c10a4a47fef943be84fbdfcc0c1bb273 [ 791.277899] env[61594]: DEBUG nova.scheduler.client.report [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 791.280389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 872b5c3f0cfc4dd891d5d501764cc3d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.296850] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 872b5c3f0cfc4dd891d5d501764cc3d5 [ 791.297841] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.279s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 791.298408] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 791.301427] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 24e0660681444e27a59aea4bccadefb9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.343946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24e0660681444e27a59aea4bccadefb9 [ 791.345451] env[61594]: DEBUG nova.compute.utils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 791.346068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 8d014f925a4c4b90840a6130baaac325 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.347504] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 791.347504] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 791.364011] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d014f925a4c4b90840a6130baaac325 [ 791.364865] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 791.366731] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg ef7c72d86f8341008a7455f7bbc7400d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.417453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef7c72d86f8341008a7455f7bbc7400d [ 791.419480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg a27e819bf46d4c798a50ba31ac2c1715 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 791.456826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a27e819bf46d4c798a50ba31ac2c1715 [ 791.458447] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 791.494229] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.494584] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.494666] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.494840] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.494980] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 
tempest-AttachVolumeNegativeTest-427402148-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.495785] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.496108] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.496948] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.496948] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.496948] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.497220] env[61594]: DEBUG nova.virt.hardware [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.498687] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a450c8-14c2-497c-ae67-4380eb49032f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.507330] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf89e72-1d93-4e92-b31a-754b906ccd33 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.525757] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 791.567882] env[61594]: DEBUG nova.policy [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06291742abbe4dee8092fba657b8ab91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b54f5acf42c64133afbb208929492c31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 792.233260] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "72c150cd-b1f2-451d-bb6b-6b8b668e97a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.233582] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "72c150cd-b1f2-451d-bb6b-6b8b668e97a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.234082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 60896ac4d7784e1a97785303288244f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.248181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60896ac4d7784e1a97785303288244f9 [ 792.248181] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 792.248181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 96d7856731f94eb4ac606599a824ea9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.277048] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.277048] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8c215913cca44289a71209bc9da28a59 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.292403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c215913cca44289a71209bc9da28a59 [ 792.292403] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.292403] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 792.292403] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 792.292403] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e81acf0-da89-4c70-b742-c7cf154f8da3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.294983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96d7856731f94eb4ac606599a824ea9f [ 792.308704] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72bf00d9-9fd6-4638-89a7-3c2a240bffb1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.324640] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.324875] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.326652] env[61594]: INFO nova.compute.claims [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 792.328763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg a4cf933f7b5c4d4a8faac825b17ee68e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.347100] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 18087c89-5fb4-4a16-a3bb-d48712ce25f5 could not be found. [ 792.347100] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 792.347100] env[61594]: INFO nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 792.347100] env[61594]: DEBUG oslo.service.loopingcall [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 792.347100] env[61594]: DEBUG nova.compute.manager [-] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 792.347276] env[61594]: DEBUG nova.network.neutron [-] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 792.388039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4cf933f7b5c4d4a8faac825b17ee68e [ 792.391575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 7616de8db8054e35a5df96b6d42229d4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.403171] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7616de8db8054e35a5df96b6d42229d4 [ 792.553018] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856a069f-cb98-428f-9b80-4f2158f52611 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.560561] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16859738-13d5-498b-b0a4-353496471826 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.596778] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70219f51-2af0-41af-92da-02745b535115 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.609154] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0594a1-69b9-481a-a43b-d3ce505b2704 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.626543] env[61594]: DEBUG nova.compute.provider_tree [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.632115] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 7db7e575a3d44149b988fc20c1f62e24 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.644606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7db7e575a3d44149b988fc20c1f62e24 [ 792.644606] env[61594]: DEBUG nova.scheduler.client.report [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider 
f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 792.647053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 598be434ee3741e7875a099f7180683c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.663874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 598be434ee3741e7875a099f7180683c [ 792.664996] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.340s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 792.665323] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 792.666970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 6a7da27a94794d02a81e1dc0dd178c33 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.712707] env[61594]: DEBUG nova.network.neutron [-] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 792.712707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b5bec1ed09e748c9a9a174c7d5efd53e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.720128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5bec1ed09e748c9a9a174c7d5efd53e [ 792.722779] env[61594]: DEBUG nova.network.neutron [-] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.722779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cd3958f44b594c8c9b786d5ccc7698a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.722779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a7da27a94794d02a81e1dc0dd178c33 [ 792.724041] env[61594]: DEBUG nova.compute.utils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 792.725027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg f143cff756b34b6ba396d562f8ae5fb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.727107] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 792.727746] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 792.731496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd3958f44b594c8c9b786d5ccc7698a9 [ 792.732033] env[61594]: INFO nova.compute.manager [-] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Took 0.39 seconds to deallocate network for instance. 
[ 792.736017] env[61594]: DEBUG nova.compute.claims [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 792.736017] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 792.736017] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 792.736875] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 2b1945f9c45d4b51828ca506b95c5f3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.738630] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f143cff756b34b6ba396d562f8ae5fb4 [ 792.739229] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 792.741846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 5d8d206f94cc49ba88fd61202c9e0d4d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.804115] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2b1945f9c45d4b51828ca506b95c5f3a [ 792.811802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d8d206f94cc49ba88fd61202c9e0d4d [ 792.819527] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 91720aedf8cc4efbad242aa5b142cf95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 792.847481] env[61594]: DEBUG nova.policy [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ce2159a0ccb46c89a7574d04142e926', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbc2efe50ed4ae5a5f0cf6f492a20bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 792.879655] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91720aedf8cc4efbad242aa5b142cf95 [ 792.883954] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 792.934954] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 792.934954] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 792.934954] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.937095] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 792.937436] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.939038] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 792.939038] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 792.939038] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 792.939353] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Got 1 possible 
topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 792.939574] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 792.939797] env[61594]: DEBUG nova.virt.hardware [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.942688] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c63f97-4c7c-4a25-b64f-4d0ed66bff5e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.956899] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d86324-aeef-4d70-a6cd-b081c552ee31 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.004538] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e575ac71-e4a2-4d0f-9093-14f23d3cf1a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.018591] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffc2354-eb78-4037-9bf4-9ae78b8d6a65 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.068329] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18db46e-bab8-4fff-8ec5-c15e5aa48343 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.078696] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93edd64f-235a-4b56-9f5c-8bf7162d5c43 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.098566] env[61594]: DEBUG nova.compute.provider_tree [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.099020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f56e4262fccd459ba84870f65773c2f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.111791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f56e4262fccd459ba84870f65773c2f5 [ 793.112961] env[61594]: DEBUG nova.scheduler.client.report [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 
1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 793.120028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 5034f5089ea944258f447b0bea4a5de4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.139692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5034f5089ea944258f447b0bea4a5de4 [ 793.140715] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.406s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.142251] env[61594]: ERROR nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Traceback (most recent call last): [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.driver.spawn(context, instance, image_meta, [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] vm_ref = self.build_virtual_machine(instance, [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.142251] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] for vif in network_info: [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] 
File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self._sync_wrapper(fn, *args, **kwargs) [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.wait() [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self[:] = self._gt.wait() [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self._exit_event.wait() [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] result = hub.switch() [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 793.142736] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return self.greenlet.switch() [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] result = function(*args, **kwargs) [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] return func(*args, **kwargs) [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise e [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] nwinfo = self.network_api.allocate_for_instance( [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] created_port_ids = self._update_ports_for_instance( [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", 
line 1365, in _update_ports_for_instance [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] with excutils.save_and_reraise_exception(): [ 793.143219] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] self.force_reraise() [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise self.value [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] updated_port = self._update_port( [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] _ensure_no_port_binding_failure(port) [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] raise exception.PortBindingFailed(port_id=port['id']) [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] nova.exception.PortBindingFailed: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. [ 793.143695] env[61594]: ERROR nova.compute.manager [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] [ 793.144120] env[61594]: DEBUG nova.compute.utils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 793.149433] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Build of instance 18087c89-5fb4-4a16-a3bb-d48712ce25f5 was re-scheduled: Binding failed for port b4396922-ab16-4a9a-82f6-838138d4a038, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 793.150041] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 793.150358] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.153727] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.153727] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 793.153727] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg da93dd0460f140d093bb5cdf5ca7b4bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.168327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da93dd0460f140d093bb5cdf5ca7b4bb [ 793.261139] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.300797] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.300797] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.301245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 0dd1c58173ef4370b162bea67ce2b865 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.314655] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0dd1c58173ef4370b162bea67ce2b865 [ 793.315228] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 793.317032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 9deac4bc11a440cda3af29102dec2dec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.371921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9deac4bc11a440cda3af29102dec2dec [ 793.405906] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 793.406278] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 793.408356] env[61594]: INFO nova.compute.claims [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.409712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 
tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg ba3a3606711444589ce0149fac321b5c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.470277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba3a3606711444589ce0149fac321b5c [ 793.474588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 57ba3f4afc694e73b40012c148d8c34f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.488952] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57ba3f4afc694e73b40012c148d8c34f [ 793.657572] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6e9197-1c18-46b8-a356-ba0a15fdad18 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.666356] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2fedb0-621d-41d6-8471-c62777e4d0ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.705332] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8eaa452-944f-41df-a410-3fae14825b0a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.716360] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c5fc3c-0164-4ecc-b4b4-327a99e10e8e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.731317] env[61594]: DEBUG nova.compute.provider_tree [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 793.731870] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg b762080aa0d24eb58f97fd4ab2fccec7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.742194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b762080aa0d24eb58f97fd4ab2fccec7 [ 793.743289] env[61594]: DEBUG nova.scheduler.client.report [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 793.747254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 06854b163afe4591bb74adf063b8b132 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.748726] env[61594]: ERROR nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. [ 793.748726] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 793.748726] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.748726] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.748726] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.748726] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.748726] env[61594]: ERROR nova.compute.manager raise self.value [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.748726] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 793.748726] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.748726] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 793.749290] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.749290] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 793.749290] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. 
[ 793.749290] env[61594]: ERROR nova.compute.manager [ 793.749290] env[61594]: Traceback (most recent call last): [ 793.749290] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 793.749290] env[61594]: listener.cb(fileno) [ 793.749290] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 793.749290] env[61594]: result = function(*args, **kwargs) [ 793.749290] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 793.749290] env[61594]: return func(*args, **kwargs) [ 793.749290] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 793.749290] env[61594]: raise e [ 793.749290] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 793.749290] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 793.749290] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.749290] env[61594]: created_port_ids = self._update_ports_for_instance( [ 793.749290] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.749290] env[61594]: with excutils.save_and_reraise_exception(): [ 793.749290] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.749290] env[61594]: self.force_reraise() [ 793.749290] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.749290] env[61594]: raise self.value [ 793.749290] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.749290] env[61594]: updated_port = self._update_port( [ 793.749290] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.749290] env[61594]: _ensure_no_port_binding_failure(port) [ 793.749290] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.749290] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 793.750224] env[61594]: nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. [ 793.750224] env[61594]: Removing descriptor: 22 [ 793.750307] env[61594]: ERROR nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. 
[ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Traceback (most recent call last): [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] yield resources [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.driver.spawn(context, instance, image_meta, [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] vm_ref = self.build_virtual_machine(instance, [ 793.750307] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] vif_infos = vmwarevif.get_vif_info(self._session, [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] for vif in network_info: [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self._sync_wrapper(fn, *args, **kwargs) [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.wait() [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self[:] = self._gt.wait() [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self._exit_event.wait() [ 793.750649] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 793.750649] env[61594]: ERROR 
nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] result = hub.switch() [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self.greenlet.switch() [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] result = function(*args, **kwargs) [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return func(*args, **kwargs) [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise e [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] nwinfo = self.network_api.allocate_for_instance( [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] created_port_ids = self._update_ports_for_instance( [ 793.751048] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] with excutils.save_and_reraise_exception(): [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.force_reraise() [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise self.value [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] updated_port = self._update_port( [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 793.751528] 
env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] _ensure_no_port_binding_failure(port) [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise exception.PortBindingFailed(port_id=port['id']) [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. [ 793.751528] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] [ 793.752132] env[61594]: INFO nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Terminating instance [ 793.756025] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 793.756025] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.756025] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 793.756025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 4a098917262248c8a26ee4fc228ebfc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.764638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a098917262248c8a26ee4fc228ebfc4 [ 793.770258] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06854b163afe4591bb74adf063b8b132 [ 793.771104] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.365s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 793.771619] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 793.773629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 208f5edbd6f34de2a25e4c66f9d8b861 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.803829] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Successfully created port: c8c36b11-f4c4-41e7-984e-dbd996ee47d9 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 793.816053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 208f5edbd6f34de2a25e4c66f9d8b861 [ 793.817460] env[61594]: DEBUG nova.compute.utils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 793.818066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg f9c91736bc944382ad3f3b003c0abf0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.818920] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 793.819120] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 793.837285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9c91736bc944382ad3f3b003c0abf0b [ 793.838502] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 793.839724] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg d2da140dd2b74676bcdae1cf8f363f98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.886032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2da140dd2b74676bcdae1cf8f363f98 [ 793.888503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 6de6b1ce03384007bd66a962f421cc86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 793.895328] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.934903] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6de6b1ce03384007bd66a962f421cc86 [ 793.936146] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 793.979182] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 793.979543] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 793.979718] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 793.979905] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 
tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 793.980510] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 793.980510] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 793.980510] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 793.980653] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 793.980827] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 793.980997] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 793.981202] env[61594]: DEBUG nova.virt.hardware [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 793.982442] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96aabf5e-c729-4859-9091-ec4a29723207 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.997035] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68aa1774-76b9-456c-8f78-8372dc0d2e3e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.130243] env[61594]: DEBUG nova.policy [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '084fb304b8aa400ebcd3bab4404c36ae', 'user_domain_id': 'default', 'system_scope': None, 
'domain_id': None, 'project_id': '1a71ad350ba241b5a3f933887cb0e8a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 794.151819] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.152397] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 497540538a8c4bd28726e1afaee2f7d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.164172] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 497540538a8c4bd28726e1afaee2f7d8 [ 794.164787] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-18087c89-5fb4-4a16-a3bb-d48712ce25f5" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.164997] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 794.165236] env[61594]: DEBUG nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 794.165411] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 794.269154] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 794.269915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 085bcf0b1a3844068b04d66c541ca763 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.281065] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 085bcf0b1a3844068b04d66c541ca763 [ 794.281359] env[61594]: DEBUG nova.network.neutron [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.282095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg fac7c1e5aebe4803a7663eec08f355b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.292971] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fac7c1e5aebe4803a7663eec08f355b8 [ 794.294056] env[61594]: INFO nova.compute.manager [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 18087c89-5fb4-4a16-a3bb-d48712ce25f5] Took 0.13 seconds to deallocate network for instance. [ 794.296244] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 1bd48a5884f643fda4d1e792b894e7a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.348462] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1bd48a5884f643fda4d1e792b894e7a0 [ 794.352058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6a2675840f11471ab9e5bea788133628 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.391480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a2675840f11471ab9e5bea788133628 [ 794.427147] env[61594]: INFO nova.scheduler.client.report [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance 18087c89-5fb4-4a16-a3bb-d48712ce25f5 [ 794.434881] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 239b945a8dff4935abe4bdcaf673f95e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.453659] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 239b945a8dff4935abe4bdcaf673f95e [ 794.454658] env[61594]: DEBUG oslo_concurrency.lockutils [None req-02eb3e51-f0d0-4e59-b4c7-e2fd7f680940 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "18087c89-5fb4-4a16-a3bb-d48712ce25f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
17.290s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 794.851776] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.852325] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg b757f953797140659cf30b19b92846c2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 794.861290] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b757f953797140659cf30b19b92846c2 [ 794.861901] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 794.862313] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 794.862510] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 794.863058] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d3351e91-aaef-4224-bb7d-f1283998a317 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.873414] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f51f7ee-7b4a-4668-8a67-8a937b85854f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.898573] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1eebe96f-c03f-4069-99d0-ea2a50de6f35 could not be found. 
[ 794.898809] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 794.898998] env[61594]: INFO nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Took 0.04 seconds to destroy the instance on the hypervisor. [ 794.899265] env[61594]: DEBUG oslo.service.loopingcall [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 794.899497] env[61594]: DEBUG nova.compute.manager [-] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 794.899591] env[61594]: DEBUG nova.network.neutron [-] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 795.020554] env[61594]: DEBUG nova.network.neutron [-] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.021101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ada208ed310d4bb2871b5982c4378152 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.027929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ada208ed310d4bb2871b5982c4378152 [ 795.028425] env[61594]: DEBUG nova.network.neutron [-] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.029159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fa6bad5a07f9425589cba19eea4fddd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.037309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa6bad5a07f9425589cba19eea4fddd1 [ 795.037309] env[61594]: INFO nova.compute.manager [-] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Took 0.14 seconds to deallocate network for instance. 
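The oslo.service loopingcall record above waits on _deallocate_network_with_retries: rather than failing the whole teardown on a transient Neutron error, the deallocation is retried with a growing delay. The snippet below is a rough, generic sketch of that retry-with-backoff idea in plain Python; it does not use the actual oslo_service API, and the deallocate_network stub is a placeholder for the real Neutron call.

    import time

    def retry_with_backoff(func, attempts=3, initial_delay=1.0, factor=2.0):
        # Call func until it succeeds or the attempt budget is exhausted,
        # sleeping a little longer between each try.
        delay = initial_delay
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(delay)
                delay *= factor

    def deallocate_network():
        # Placeholder for the Neutron deallocation performed during teardown.
        return "deallocated"

    print(retry_with_backoff(deallocate_network))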
[ 795.039271] env[61594]: DEBUG nova.compute.claims [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 795.039675] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 795.040537] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 795.042561] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 491d0dc4ca894653a189f94349f23e7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.083150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 491d0dc4ca894653a189f94349f23e7c [ 795.181864] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9183aa23-5c93-4568-b32a-2690f06f38f4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.190381] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbfe3a2-d9a4-4f98-b2e4-5f4dfd6cf57a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.222722] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a317a2a1-7101-4d13-8638-85357a69d23b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.231765] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dcf61a-bd68-478d-a753-6c50dcba299f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.245907] env[61594]: DEBUG nova.compute.provider_tree [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 795.246524] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg b018a94c03b44986af366dfe17a38898 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.255671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
b018a94c03b44986af366dfe17a38898 [ 795.256696] env[61594]: DEBUG nova.scheduler.client.report [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 795.259155] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg c05aeb63c061430c8f05793268f585c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.273959] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c05aeb63c061430c8f05793268f585c5 [ 795.274776] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.235s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 795.275638] env[61594]: ERROR nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. 
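The ERROR record above, and the traceback that follows it, come from the same guard: after updating the port, Nova inspects the binding state that Neutron returned and raises PortBindingFailed when binding did not succeed, which aborts the build and triggers the re-schedule seen later. Below is a minimal sketch of that kind of check, assuming the port dict carries Neutron's 'binding:vif_type' field set to 'binding_failed' on failure; it is a simplified stand-in for the _ensure_no_port_binding_failure helper named in the traceback, not the real code.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron marks an unbindable port by setting binding:vif_type to
        # 'binding_failed'; surface that as a hard error so the instance build
        # is aborted and rescheduled, as the log records show.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': 'bd590870-c0eb-4002-80ae-72ba3ded7e91',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)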
[ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Traceback (most recent call last): [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.driver.spawn(context, instance, image_meta, [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self._vmops.spawn(context, instance, image_meta, injected_files, [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] vm_ref = self.build_virtual_machine(instance, [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] vif_infos = vmwarevif.get_vif_info(self._session, [ 795.275638] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] for vif in network_info: [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self._sync_wrapper(fn, *args, **kwargs) [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.wait() [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self[:] = self._gt.wait() [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self._exit_event.wait() [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] result = hub.switch() [ 795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
795.276113] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return self.greenlet.switch() [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] result = function(*args, **kwargs) [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] return func(*args, **kwargs) [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise e [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] nwinfo = self.network_api.allocate_for_instance( [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] created_port_ids = self._update_ports_for_instance( [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] with excutils.save_and_reraise_exception(): [ 795.276591] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] self.force_reraise() [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise self.value [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] updated_port = self._update_port( [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] _ensure_no_port_binding_failure(port) [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] raise exception.PortBindingFailed(port_id=port['id']) [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] nova.exception.PortBindingFailed: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. [ 795.277072] env[61594]: ERROR nova.compute.manager [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] [ 795.277363] env[61594]: DEBUG nova.compute.utils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 795.279667] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Build of instance 1eebe96f-c03f-4069-99d0-ea2a50de6f35 was re-scheduled: Binding failed for port bd590870-c0eb-4002-80ae-72ba3ded7e91, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 795.280156] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 795.280785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 795.280785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.280785] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 795.281228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 2a2a1e8d681e4e7ea102770dd948bb01 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 795.292863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a2a1e8d681e4e7ea102770dd948bb01 [ 795.389400] env[61594]: DEBUG nova.network.neutron [None 
req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 795.728743] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Successfully created port: 1e3c1995-8599-4229-aaad-23c894ad768e {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 795.922920] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Successfully created port: 53214105-fc54-4a7d-ac37-7cb3c832497a {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.138890] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.138890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 43f419ae73384dd2bfcffedb5e34fe90 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.150815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 43f419ae73384dd2bfcffedb5e34fe90 [ 796.151978] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-1eebe96f-c03f-4069-99d0-ea2a50de6f35" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 796.152359] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 796.152661] env[61594]: DEBUG nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 796.152964] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 796.292494] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquiring lock "48b27adc-0bc7-44dd-9330-d0b30593f95f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.292494] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "48b27adc-0bc7-44dd-9330-d0b30593f95f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.292494] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 1b834aa3ce1549bf9fea09ffbe408718 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.305021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b834aa3ce1549bf9fea09ffbe408718 [ 796.305021] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 796.305021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 9c34683d0af94784bfb6e17d3673cd7a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.344024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c34683d0af94784bfb6e17d3673cd7a [ 796.386587] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 796.387254] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 796.389944] env[61594]: INFO nova.compute.claims [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 796.394219] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 853c982791014219aadbc267788b2f00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.443253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 853c982791014219aadbc267788b2f00 [ 796.445710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg b1bdc2b32afa47f280b465bddaf1b807 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.466505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1bdc2b32afa47f280b465bddaf1b807 [ 796.481103] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 796.481251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg af11d6139e474ab8b99136f10bc7bb7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.491864] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af11d6139e474ab8b99136f10bc7bb7f [ 796.492835] env[61594]: DEBUG nova.network.neutron [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.493368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 4ad673cf496d44b09ebd08a98ff080bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.502453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4ad673cf496d44b09ebd08a98ff080bc [ 796.503373] env[61594]: INFO nova.compute.manager [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 1eebe96f-c03f-4069-99d0-ea2a50de6f35] Took 0.35 seconds to deallocate network for instance. [ 796.505597] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 96e2db0d85dc4355a66a66b421ff434f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.582089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96e2db0d85dc4355a66a66b421ff434f [ 796.583393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 104344e4ad174b1cbc1e2c1cb7a4b8a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.618308] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1c5c03-92e7-4052-bdf0-b0f303314409 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.623590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 104344e4ad174b1cbc1e2c1cb7a4b8a4 [ 796.629795] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d14d8e-db73-4e26-b093-3054262358f1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.669402] env[61594]: INFO nova.scheduler.client.report [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Deleted allocations for instance 1eebe96f-c03f-4069-99d0-ea2a50de6f35 [ 796.676199] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5cc69eea-39ef-4249-b9a2-1ab21a250293 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.679555] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 90bf7299edf849fa90f5dd8deca0cb00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.687687] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca238ee9-5f98-4a92-bd8d-ac097ba6c635 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.705639] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90bf7299edf849fa90f5dd8deca0cb00 [ 796.706304] env[61594]: DEBUG nova.compute.provider_tree [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 796.706774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 0428c36503a14abfb5c51017a4e8bf8b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.707927] env[61594]: DEBUG oslo_concurrency.lockutils [None req-d361c577-3bb4-4524-abf8-56d6661ff30a tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "1eebe96f-c03f-4069-99d0-ea2a50de6f35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.709s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.714574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0428c36503a14abfb5c51017a4e8bf8b [ 796.715455] env[61594]: DEBUG nova.scheduler.client.report [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 796.717913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg c4190bd06be04688aaf0c4008253a1c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.730802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4190bd06be04688aaf0c4008253a1c9 [ 796.731618] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc 
tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.345s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 796.731968] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 796.733749] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 5ae890379547480c90fc8761b19c4e76 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.768527] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ae890379547480c90fc8761b19c4e76 [ 796.769963] env[61594]: DEBUG nova.compute.utils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 796.770646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 099caa55b13e458bb7816daddc72e7fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.771516] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 796.771684] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 796.784747] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 099caa55b13e458bb7816daddc72e7fa [ 796.785488] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 796.787352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 6f1b4479e18248279c7bbfd9c4c643eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 796.830811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f1b4479e18248279c7bbfd9c4c643eb [ 796.832424] env[61594]: INFO nova.virt.block_device [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Booting with volume 434f3adf-e546-451c-af96-c91009ff3d7e at /dev/sda [ 796.909929] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-149f83be-a1db-48f1-a58d-bfb76cbd60ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.925067] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887aec1e-e86c-4b9e-93da-38c9b9b3b601 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.956075] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce34bc53-0238-486b-aaca-c557dfe99174 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.965709] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7011c1f-0f81-479d-bc9c-80e91875d9f4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.994241] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda72945-da96-4e55-9287-2bbc9f4a6d97 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.009022] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b02606-ad28-463c-b662-2cff959948dd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.024967] env[61594]: DEBUG nova.virt.block_device [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating existing volume attachment record: d2d631a4-764e-4f94-83a8-866a248d6077 {{(pid=61594) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 797.074484] env[61594]: DEBUG nova.policy [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '797bb578a1db4de9b026ae356342e3eb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bc87957366af49c0b5fa7a2297203f53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 
'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 797.219441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 2dde5885ce5749fead397e7d5da8e264 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.239905] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2dde5885ce5749fead397e7d5da8e264 [ 797.286075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg aa7396af6fab431f866161009f03fad1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.299109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aa7396af6fab431f866161009f03fad1 [ 797.301685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 0305e97b90dc4833822eb429e429f32b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.343291] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquiring lock "93ecfc04-23c5-41fc-babf-1a5bd02769b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.343526] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "93ecfc04-23c5-41fc-babf-1a5bd02769b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.343991] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 90e7fdd427144af9bc533af58d688c22 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.346299] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0305e97b90dc4833822eb429e429f32b [ 797.347381] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 797.347921] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 797.348169] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 797.349327] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.349327] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 797.349327] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.349327] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 797.349327] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 797.349662] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 797.349662] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] 
Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 797.349662] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 797.349662] env[61594]: DEBUG nova.virt.hardware [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.350715] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40d7d9d-bce3-4a1f-8f60-786e997b3390 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.359630] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 90e7fdd427144af9bc533af58d688c22 [ 797.360372] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 797.362435] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 82861e2b9d4244f1909c787567b21b77 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.368086] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8418d9-a0c6-4292-8133-59fde8656ae3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.413153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 82861e2b9d4244f1909c787567b21b77 [ 797.435377] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 797.435634] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 797.437213] env[61594]: INFO nova.compute.claims [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 
797.440129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 061fb745239043df90d96172daf89c7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.507550] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 061fb745239043df90d96172daf89c7e [ 797.508605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 346a2a98f6314efcbc9a05e5f4701b29 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.524450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 346a2a98f6314efcbc9a05e5f4701b29 [ 797.658269] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148b47f2-49b8-422a-8d4b-60520eae7522 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.673818] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f86c5f-39e1-4925-9c03-76e49b4a60ca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.713995] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd36c78-60bd-47a3-b656-27eb483ca585 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.721955] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363d6868-cdf0-4bd8-8f32-b705329382a0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.742492] env[61594]: DEBUG nova.compute.provider_tree [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.743742] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg f2f3fa47814642faa7d912a8cfb5aa56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.759282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2f3fa47814642faa7d912a8cfb5aa56 [ 797.760297] env[61594]: DEBUG nova.scheduler.client.report [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 797.764338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 0ec5e3956b2342e19c727846a4613fac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.789936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ec5e3956b2342e19c727846a4613fac [ 797.792192] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 797.795443] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 797.797104] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg d2d1c8c2365e466593d0e6ac2933056a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.851140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2d1c8c2365e466593d0e6ac2933056a [ 797.853031] env[61594]: DEBUG nova.compute.utils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 797.853292] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 98d821297003401b962b49327a9adf5c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.854168] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 797.854337] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 797.868230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98d821297003401b962b49327a9adf5c [ 797.869118] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 797.870783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 129a9b7386904ef0934c06715eb3f762 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.927132] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 129a9b7386904ef0934c06715eb3f762 [ 797.929919] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 3654d8606d614e308c699ac73a034338 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 797.972333] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3654d8606d614e308c699ac73a034338 [ 797.973706] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 798.008371] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 798.008842] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 798.009052] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 798.009248] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 798.009398] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 798.009545] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 798.009750] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 798.009908] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 798.010085] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 798.010314] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 798.010508] env[61594]: DEBUG nova.virt.hardware [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 798.011918] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4916de5-d62e-411c-9926-23facde3243e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.022418] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c6550f-d1fb-4340-8bac-2c12c28a0f4e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.130899] env[61594]: DEBUG nova.policy [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '769b9425a8e04075a3831fe07d41ccd6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92ff6ab8a7d84bd4a5d50103a3b14bc2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 799.573171] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Successfully created port: 42447106-00fb-4146-a1a8-05ce6aa104cc {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.026687] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Successfully created port: 8b090fba-fb82-4365-b99c-e7d58baf8a68 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 800.033176] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.034269] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.034269] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg f439ce8879f144b89fa6a26551954eab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.048429] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f439ce8879f144b89fa6a26551954eab [ 800.050989] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 800.054448] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 295b0d8358b84f7b9d0d1bd90a9b1ed7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.117917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 295b0d8358b84f7b9d0d1bd90a9b1ed7 [ 800.162136] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.162423] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.167014] env[61594]: INFO nova.compute.claims [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.168040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 1a3e0556c6be4b3c8d444fe71bb38676 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.216675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1a3e0556c6be4b3c8d444fe71bb38676 [ 800.220727] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 216b31d53b0f42a789b9d42fd2622b4f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.230628] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 216b31d53b0f42a789b9d42fd2622b4f [ 800.331031] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "e600280c-2414-420d-bc8d-6e3e7979fccf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.331347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "e600280c-2414-420d-bc8d-6e3e7979fccf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.332129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg bbfbd7b7ed3d4198ad24a9de7f15a5c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.352354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbfbd7b7ed3d4198ad24a9de7f15a5c5 [ 800.352950] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 800.354940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 0aa593f2f1ff4ad6bbb667c706b85948 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.374654] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "4ad1a310-5786-4bb6-87ff-72069f692eff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.375055] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "4ad1a310-5786-4bb6-87ff-72069f692eff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.375404] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg a2a2f0ce3e274b6f849e56b918dbd1d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.396454] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2a2f0ce3e274b6f849e56b918dbd1d8 [ 800.397379] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 800.400441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 26d26532c04249e68fdc5eab9685c6a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.449632] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aa593f2f1ff4ad6bbb667c706b85948 [ 800.480442] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.482352] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c54a8d-ed14-460e-b2a3-126d4472166d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.493237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26d26532c04249e68fdc5eab9685c6a7 [ 800.496760] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5fb8dc-db98-4976-83a2-30fb1b52ecf2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.538969] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 800.539781] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a1aad8-063d-40d8-8103-00efb6346f27 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.549792] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8860ae0a-9436-47c5-b80b-d3cbb0aae543 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.567309] env[61594]: DEBUG nova.compute.provider_tree [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.567309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 34251d578c2d48329bc65fdbb9486081 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.582209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 34251d578c2d48329bc65fdbb9486081 [ 800.583295] env[61594]: DEBUG nova.scheduler.client.report [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 
tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 800.585944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 26cf5303a9d640979a0ca2efafe6e765 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.599583] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26cf5303a9d640979a0ca2efafe6e765 [ 800.601356] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.438s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.601356] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 800.603209] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 9960dc1b4e3243c39b7b6f5c20b7073d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.604595] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.124s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.609016] env[61594]: INFO nova.compute.claims [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.609016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 39d4d326bf104fe783fbff9c4e9f1252 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.654193] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9960dc1b4e3243c39b7b6f5c20b7073d [ 800.654830] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d4d326bf104fe783fbff9c4e9f1252 [ 800.656206] env[61594]: DEBUG nova.compute.utils [None 
req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 800.656945] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 81f5addefb984f66824a4d7c86812017 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.661022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 3e327c8a67984977be360f577a52b12a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.661022] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 800.662496] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 800.669390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81f5addefb984f66824a4d7c86812017 [ 800.670482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e327c8a67984977be360f577a52b12a [ 800.671930] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 800.673595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 31ddd56572204a7185e0b01646f16143 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.708774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ddd56572204a7185e0b01646f16143 [ 800.712685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 13281747f69d4b77a56411f2f0c30d85 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.753081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13281747f69d4b77a56411f2f0c30d85 [ 800.754497] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 800.785908] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 800.785908] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 800.785908] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.786352] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 800.786352] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.787218] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 800.787218] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 800.787218] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 800.787218] env[61594]: DEBUG nova.virt.hardware [None 
req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 800.787403] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 800.787534] env[61594]: DEBUG nova.virt.hardware [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 800.788440] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4502cb07-b594-475a-99d3-73944be871df {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.802469] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b1db2f-2b81-4166-984a-aca4baac0e01 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.869816] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c95d85-45eb-4329-a08b-0a8767e54935 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.878845] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433cbbfc-7cc6-4971-be47-e69c69374385 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.915310] env[61594]: DEBUG nova.policy [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 800.917330] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db58c984-3f3d-4004-a653-9e18bbb48862 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.928484] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ef82df-f9de-4ce7-8a83-620cb781c771 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.945222] env[61594]: DEBUG nova.compute.provider_tree [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed 
in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.945759] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9ba3e1bfe6c74718b6adcedeb72a23bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.961141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ba3e1bfe6c74718b6adcedeb72a23bd [ 800.961141] env[61594]: DEBUG nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 800.963794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg a82e423a5b374e039da3cfd23f286483 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.979432] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a82e423a5b374e039da3cfd23f286483 [ 800.980315] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.376s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 800.980821] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 800.982825] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg fef0c72fbe364cb6a047a29854abacf4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 800.986047] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.445s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 800.986154] env[61594]: INFO nova.compute.claims [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.987603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 126b6cbf359a4e35860a9578d62cb416 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.023223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fef0c72fbe364cb6a047a29854abacf4 [ 801.024147] env[61594]: DEBUG nova.compute.utils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.024594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg b4bd207f5fcc41d2a10287ad757077b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.026679] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 126b6cbf359a4e35860a9578d62cb416 [ 801.027161] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 801.027347] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.031075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg c76daed21fff41a28154baa0d3e96393 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.036900] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4bd207f5fcc41d2a10287ad757077b1 [ 801.038713] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 801.038943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e9b4861fd8bc4ad096068d3b9a3acfde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.041537] env[61594]: ERROR nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. 
[ 801.041537] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 801.041537] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.041537] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.041537] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.041537] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.041537] env[61594]: ERROR nova.compute.manager raise self.value [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.041537] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 801.041537] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.041537] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 801.042228] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.042228] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 801.042228] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. 
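The traceback above ends in nova.exception.PortBindingFailed, raised by _ensure_no_port_binding_failure after Neutron reported a failed binding for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9. A minimal, self-contained Python sketch of that check, mirroring the frames named in the traceback (the simplified PortBindingFailed class and the example port dict are illustrative stand-ins, not Nova's actual definitions):

# Sketch of the binding-failure check named in the traceback above.
# PortBindingFailed is a simplified stand-in for nova.exception.PortBindingFailed.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def _ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind with the vif_type 'binding_failed';
    # raising here aborts the spawn, as seen in the "Instance failed to spawn"
    # records that follow in this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Hypothetical port dict resembling what Neutron would return for the failed port
# above (illustrative data only):
port = {'id': 'c8c36b11-f4c4-41e7-984e-dbd996ee47d9',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port c8c36b11-..., please check neutron logs ...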
[ 801.042228] env[61594]: ERROR nova.compute.manager [ 801.042228] env[61594]: Traceback (most recent call last): [ 801.042228] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 801.042228] env[61594]: listener.cb(fileno) [ 801.042228] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 801.042228] env[61594]: result = function(*args, **kwargs) [ 801.042228] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 801.042228] env[61594]: return func(*args, **kwargs) [ 801.042228] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 801.042228] env[61594]: raise e [ 801.042228] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 801.042228] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 801.042228] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.042228] env[61594]: created_port_ids = self._update_ports_for_instance( [ 801.042228] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.042228] env[61594]: with excutils.save_and_reraise_exception(): [ 801.042228] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.042228] env[61594]: self.force_reraise() [ 801.042228] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.042228] env[61594]: raise self.value [ 801.042228] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.042228] env[61594]: updated_port = self._update_port( [ 801.042228] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.042228] env[61594]: _ensure_no_port_binding_failure(port) [ 801.042228] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.042228] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 801.043442] env[61594]: nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. [ 801.043442] env[61594]: Removing descriptor: 25 [ 801.043442] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c76daed21fff41a28154baa0d3e96393 [ 801.043736] env[61594]: ERROR nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. 
[ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Traceback (most recent call last): [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] yield resources [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.driver.spawn(context, instance, image_meta, [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self._vmops.spawn(context, instance, image_meta, injected_files, [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] vm_ref = self.build_virtual_machine(instance, [ 801.043736] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] vif_infos = vmwarevif.get_vif_info(self._session, [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] for vif in network_info: [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self._sync_wrapper(fn, *args, **kwargs) [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.wait() [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self[:] = self._gt.wait() [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self._exit_event.wait() [ 801.044190] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 801.044190] env[61594]: ERROR 
nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] result = hub.switch() [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self.greenlet.switch() [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] result = function(*args, **kwargs) [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return func(*args, **kwargs) [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise e [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] nwinfo = self.network_api.allocate_for_instance( [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] created_port_ids = self._update_ports_for_instance( [ 801.044698] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] with excutils.save_and_reraise_exception(): [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.force_reraise() [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise self.value [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] updated_port = self._update_port( [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 801.045526] 
env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] _ensure_no_port_binding_failure(port) [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise exception.PortBindingFailed(port_id=port['id']) [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. [ 801.045526] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] [ 801.046317] env[61594]: INFO nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Terminating instance [ 801.046363] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 801.046532] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.046749] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 801.047182] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 36768db6a75a42a8ad4b1840c3461ef8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.057035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36768db6a75a42a8ad4b1840c3461ef8 [ 801.078389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9b4861fd8bc4ad096068d3b9a3acfde [ 801.081496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg ffd9774396034dc382ddda3ec7403a01 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.113652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffd9774396034dc382ddda3ec7403a01 [ 801.114826] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Start spawning the instance on the 
hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 801.132486] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 801.142895] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.143171] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.143333] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.143645] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.143716] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.143900] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.144170] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
801.144401] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.144622] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.144829] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.145057] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.145957] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ef3607-9633-42ff-95c3-a77006afd3fc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.158293] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aec3873-e906-4909-b3ce-bfd8b8dce41e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.235357] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e4d630-aa39-4087-a2a7-7b87d311f2d6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.243215] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40efaf5-e79c-4364-ac04-d73cad5d1d7f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.279341] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e84dc7b-32d9-4e7e-b343-137cea6aedcc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.288302] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8d6f5ed-0f31-4b98-a69b-bc707d48cd4e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.305528] env[61594]: DEBUG nova.compute.provider_tree [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.306092] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 
tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 1af915acabb14b0d8be4200757c837a7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.316411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1af915acabb14b0d8be4200757c837a7 [ 801.317708] env[61594]: DEBUG nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 801.320700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg f0c4d19e771f4430b47a0772ae7e84ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.323867] env[61594]: DEBUG nova.policy [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '185a047edb6241d59feb8ad5d2bbc749', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '201633229817424689c8fe5557911ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 801.338264] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0c4d19e771f4430b47a0772ae7e84ff [ 801.339531] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.355s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.339658] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 801.341645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg a386b0406fa74c59adae1992e687c351 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.376036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a386b0406fa74c59adae1992e687c351 [ 801.377821] env[61594]: DEBUG nova.compute.utils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 801.378503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 30c39d0952dc4aeeb6be574c830ba29d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.381537] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 801.383160] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 801.410718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30c39d0952dc4aeeb6be574c830ba29d [ 801.411506] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 801.413442] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg b6bd2f39a6fa4362bea364e5ce9e2867 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.461840] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6bd2f39a6fa4362bea364e5ce9e2867 [ 801.463720] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9ec31beb2b3243cc8bda7244a2f39002 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.506943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ec31beb2b3243cc8bda7244a2f39002 [ 801.508102] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 801.541091] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 801.541971] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 801.542222] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 801.542386] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 801.542536] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image pref 0:0:0 
{{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 801.542777] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 801.542932] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 801.543211] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 801.543418] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 801.543596] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 801.543781] env[61594]: DEBUG nova.virt.hardware [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 801.544925] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5289b8-e520-4123-8ff1-aeab63c47b8e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.553881] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034c76c0-1317-4964-a393-eee6337b923d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.688203] env[61594]: DEBUG nova.policy [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '185a047edb6241d59feb8ad5d2bbc749', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '201633229817424689c8fe5557911ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 801.957553] env[61594]: DEBUG nova.network.neutron [None 
req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.958106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 841b8dd289d14ca6ba5f08e0d51a8483 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 801.969603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 841b8dd289d14ca6ba5f08e0d51a8483 [ 801.970303] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 801.970708] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 801.970902] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 801.971548] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dde7485-692d-4d23-9f76-8efa2f04525f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.982644] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d043012-e1dc-4598-a807-00ada7a6baa5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.013400] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 11ca01d2-83e3-42c4-bef5-87459148e858 could not be found. [ 802.013661] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 802.013833] env[61594]: INFO nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Took 0.04 seconds to destroy the instance on the hypervisor. 
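The warning and the two entries after it show the teardown path discovering that no VM was ever created on the vCenter backend (the build failed at port binding), catching InstanceNotFound, and treating the destroy as already complete. A minimal sketch of that tolerant-destroy pattern follows; the helper names are hypothetical and this is not Nova's actual vmops code.

    # Sketch only: destroy path that tolerates an instance which never reached
    # the hypervisor. lookup_vm/delete_vm are hypothetical helpers.

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""


    def destroy_on_hypervisor(instance_uuid, lookup_vm, delete_vm, log=print):
        """Delete the backend VM if it exists; treat 'not found' as already gone."""
        try:
            vm_ref = lookup_vm(instance_uuid)
            if vm_ref is None:
                raise InstanceNotFound(instance_uuid)
            delete_vm(vm_ref)
        except InstanceNotFound:
            # The build failed before a VM was created (here: port binding
            # failed), so there is nothing to tear down on the backend.
            log(f"WARNING: instance {instance_uuid} does not exist on backend")
        # Either way the instance counts as destroyed, and network
        # deallocation and claim cleanup can proceed.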
[ 802.014101] env[61594]: DEBUG oslo.service.loopingcall [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 802.014353] env[61594]: DEBUG nova.compute.manager [-] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 802.014450] env[61594]: DEBUG nova.network.neutron [-] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 802.126697] env[61594]: DEBUG nova.network.neutron [-] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.127253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e85bdfc8cf75466e9898676412cf15dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.135802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e85bdfc8cf75466e9898676412cf15dd [ 802.136402] env[61594]: DEBUG nova.network.neutron [-] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.136813] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7655787d36ae4e4caab7ba81c53e8947 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.146270] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7655787d36ae4e4caab7ba81c53e8947 [ 802.146735] env[61594]: INFO nova.compute.manager [-] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Took 0.13 seconds to deallocate network for instance. 
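The looping-call entry above ("Waiting for function ... _deallocate_network_with_retries to return") wraps the Neutron deallocation in a retry helper before the manager reports the 0.13 s deallocation. A rough plain-Python stand-in for that pattern is sketched below; it is not the oslo.service looping-call API, and the attempt count and fixed sleep are illustrative (the real helper backs off between attempts).

    import time


    def deallocate_network_with_retries(deallocate, context, instance,
                                        attempts=3, interval=2.0):
        """Retry the Neutron deallocation a few times before giving up.

        `deallocate` would be something like
        network_api.deallocate_for_instance in Nova's case.
        """
        for attempt in range(1, attempts + 1):
            try:
                deallocate(context, instance)
                return
            except Exception:
                if attempt == attempts:
                    raise
                time.sleep(interval)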
[ 802.149126] env[61594]: DEBUG nova.compute.claims [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 802.149372] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.149530] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.151517] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 938747edd2c64ab7aa14e18638a4ae16 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.199411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 938747edd2c64ab7aa14e18638a4ae16 [ 802.388565] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e565cdb-3084-4d90-a45e-98025f4785c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.397376] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45ba052-7bf6-4aa4-85d7-04f4081c47ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.432285] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bdb609-a89d-4f8c-b2ee-82f34aa40870 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.445189] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14597069-ab15-4c07-9c07-02b335805ed3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.468903] env[61594]: DEBUG nova.compute.provider_tree [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.470790] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg e344ede629094bef9e52bdc142680614 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.479874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
e344ede629094bef9e52bdc142680614 [ 802.481325] env[61594]: DEBUG nova.scheduler.client.report [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 802.484206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 33d85c7a3e0e416b85c58b5204835049 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.511355] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33d85c7a3e0e416b85c58b5204835049 [ 802.514044] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.363s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.514928] env[61594]: ERROR nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. 
[ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Traceback (most recent call last): [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.driver.spawn(context, instance, image_meta, [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] vm_ref = self.build_virtual_machine(instance, [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] vif_infos = vmwarevif.get_vif_info(self._session, [ 802.514928] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] for vif in network_info: [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self._sync_wrapper(fn, *args, **kwargs) [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.wait() [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self[:] = self._gt.wait() [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self._exit_event.wait() [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] result = hub.switch() [ 802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
802.515368] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return self.greenlet.switch() [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] result = function(*args, **kwargs) [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] return func(*args, **kwargs) [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise e [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] nwinfo = self.network_api.allocate_for_instance( [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] created_port_ids = self._update_ports_for_instance( [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] with excutils.save_and_reraise_exception(): [ 802.515747] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] self.force_reraise() [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise self.value [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] updated_port = self._update_port( [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] _ensure_no_port_binding_failure(port) [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] raise exception.PortBindingFailed(port_id=port['id']) [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] nova.exception.PortBindingFailed: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. [ 802.516171] env[61594]: ERROR nova.compute.manager [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] [ 802.516487] env[61594]: DEBUG nova.compute.utils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 802.519016] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Build of instance 11ca01d2-83e3-42c4-bef5-87459148e858 was re-scheduled: Binding failed for port c8c36b11-f4c4-41e7-984e-dbd996ee47d9, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 802.519016] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 802.519016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.519016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.519268] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 802.519467] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg c63ecbc3b1524f97a33301d95de7637f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.530050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c63ecbc3b1524f97a33301d95de7637f [ 802.646594] env[61594]: DEBUG nova.network.neutron [None 
req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.702727] env[61594]: ERROR nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. [ 802.702727] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 802.702727] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 802.702727] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 802.702727] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.702727] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.702727] env[61594]: ERROR nova.compute.manager raise self.value [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 802.702727] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 802.702727] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.702727] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 802.703402] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.703402] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 802.703402] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. 
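The bottom frames of this traceback are where the exception originates: after Neutron returns the updated port, Nova inspects the binding result and raises PortBindingFailed when the bind did not succeed. The following sketch of that check is inferred from the frames above (nova/network/neutron.py, _ensure_no_port_binding_failure); the exact field handling in the real code may differ.

    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""

        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")


    def ensure_no_port_binding_failure(port):
        """Raise if Neutron reports that it could not bind the port to a host."""
        # Neutron marks an unbindable port with the special vif_type
        # 'binding_failed': no ML2 mechanism driver could bind the port on the
        # target compute host, so the instance cannot be wired up there.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])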
[ 802.703402] env[61594]: ERROR nova.compute.manager [ 802.703402] env[61594]: Traceback (most recent call last): [ 802.703402] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 802.703402] env[61594]: listener.cb(fileno) [ 802.703402] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 802.703402] env[61594]: result = function(*args, **kwargs) [ 802.703402] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 802.703402] env[61594]: return func(*args, **kwargs) [ 802.703402] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 802.703402] env[61594]: raise e [ 802.703402] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 802.703402] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 802.703402] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 802.703402] env[61594]: created_port_ids = self._update_ports_for_instance( [ 802.703402] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 802.703402] env[61594]: with excutils.save_and_reraise_exception(): [ 802.703402] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.703402] env[61594]: self.force_reraise() [ 802.703402] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.703402] env[61594]: raise self.value [ 802.703402] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 802.703402] env[61594]: updated_port = self._update_port( [ 802.703402] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.703402] env[61594]: _ensure_no_port_binding_failure(port) [ 802.703402] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.703402] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 802.704578] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. [ 802.704578] env[61594]: Removing descriptor: 24 [ 802.704578] env[61594]: ERROR nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. 
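The bare eventlet traceback and the "Removing descriptor" line show the same PortBindingFailed escaping the greenthread that was allocating the network in the background; the error then surfaces a second time when the spawn path iterates network_info and waits on that greenthread (the _sync_wrapper frames in the tracebacks). A simplified illustration of this spawn-then-reraise-on-wait behaviour, using eventlet directly rather than Nova's NetworkInfoAsyncWrapper:

    import eventlet


    class PortBindingFailed(Exception):
        """Stand-in for nova.exception.PortBindingFailed."""


    def allocate_network_async(port_id):
        # Runs in a background greenthread; if Neutron reports a failed
        # binding, the exception is raised here first (the bare traceback
        # above, logged when the greenthread dies).
        raise PortBindingFailed(f"Binding failed for port {port_id}")


    def build_instance(port_id):
        # Kick off allocation in the background and keep building...
        gt = eventlet.spawn(allocate_network_async, port_id)
        # ...then block only when the network info is actually needed.
        # GreenThread.wait() re-raises the deferred exception, which is why
        # the spawn path reports "Instance failed to spawn" with the same
        # PortBindingFailed.
        return gt.wait()


    if __name__ == "__main__":
        try:
            build_instance("1e3c1995-8599-4229-aaad-23c894ad768e")
        except PortBindingFailed as exc:
            print(f"spawn failed: {exc}")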
[ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Traceback (most recent call last): [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] yield resources [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.driver.spawn(context, instance, image_meta, [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 802.704578] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] vm_ref = self.build_virtual_machine(instance, [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] for vif in network_info: [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self._sync_wrapper(fn, *args, **kwargs) [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.wait() [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self[:] = self._gt.wait() [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self._exit_event.wait() [ 802.706138] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.706644] env[61594]: ERROR 
nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] result = hub.switch() [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self.greenlet.switch() [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] result = function(*args, **kwargs) [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return func(*args, **kwargs) [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise e [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] nwinfo = self.network_api.allocate_for_instance( [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 802.706644] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] created_port_ids = self._update_ports_for_instance( [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] with excutils.save_and_reraise_exception(): [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.force_reraise() [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise self.value [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] updated_port = self._update_port( [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 802.707085] 
env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] _ensure_no_port_binding_failure(port) [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 802.707085] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise exception.PortBindingFailed(port_id=port['id']) [ 802.707437] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. [ 802.707437] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] [ 802.707437] env[61594]: INFO nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Terminating instance [ 802.707437] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 802.707437] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.707437] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 802.708109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 94dd55404bb7490ba7c424899c490c5f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 802.717113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94dd55404bb7490ba7c424899c490c5f [ 802.732841] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquiring lock "500259b0-a57d-43ff-9c88-46190b6a3a10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.732965] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "500259b0-a57d-43ff-9c88-46190b6a3a10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 802.757733] 
env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 802.773009] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "f7d0db3d-9247-409a-bf1e-4b53c2368ddc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 802.773268] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "f7d0db3d-9247-409a-bf1e-4b53c2368ddc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.098592] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.099233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 6d2579f4ba154acbb863ed2069b31f6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.113020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d2579f4ba154acbb863ed2069b31f6e [ 803.113020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.113020] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 803.113020] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 803.113299] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bd8c506-7d23-46a8-b0f8-b049143b16bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.124061] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0b2314-3223-4cb2-9c54-1b122e71ede0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.149159] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72c150cd-b1f2-451d-bb6b-6b8b668e97a1 could not be found. [ 803.149373] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 803.149564] env[61594]: INFO nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 803.149853] env[61594]: DEBUG oslo.service.loopingcall [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 803.150088] env[61594]: DEBUG nova.compute.manager [-] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 803.150181] env[61594]: DEBUG nova.network.neutron [-] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 803.249184] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Successfully created port: c030586c-57a9-4ed0-bfb3-955b0961e165 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.263445] env[61594]: DEBUG nova.network.neutron [-] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.263445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 20e8011064684309a6435659ef0ec3b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.272088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20e8011064684309a6435659ef0ec3b7 [ 803.272540] env[61594]: DEBUG nova.network.neutron [-] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.272938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4d0328543f634bbd907d5f064204bd7a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.285798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d0328543f634bbd907d5f064204bd7a [ 803.286732] env[61594]: INFO nova.compute.manager [-] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Took 0.14 seconds to deallocate network for instance. [ 803.290016] env[61594]: DEBUG nova.compute.claims [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 803.290016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 803.290144] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.291976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg dd37370d1ff943e7851b70bacc7c8014 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.314276] env[61594]: DEBUG nova.compute.manager [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Received event network-changed-1e3c1995-8599-4229-aaad-23c894ad768e {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 803.314276] env[61594]: DEBUG nova.compute.manager [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Refreshing instance network info cache due to event network-changed-1e3c1995-8599-4229-aaad-23c894ad768e. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 803.314276] env[61594]: DEBUG oslo_concurrency.lockutils [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] Acquiring lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.314276] env[61594]: DEBUG oslo_concurrency.lockutils [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] Acquired lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.314276] env[61594]: DEBUG nova.network.neutron [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Refreshing network info cache for port 1e3c1995-8599-4229-aaad-23c894ad768e {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.314547] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] Expecting reply to msg 3f9f722853a04ffcb0ce14f09fc2fb7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.325251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f9f722853a04ffcb0ce14f09fc2fb7e [ 803.336030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd37370d1ff943e7851b70bacc7c8014 [ 803.389619] env[61594]: DEBUG nova.network.neutron [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.444696] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Successfully created port: d3a45c31-7ab7-489a-aac0-8e6c223de0e7 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.528193] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.529011] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 3b6d16cb822d444ba725d96bb4432cf8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.545697] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b6d16cb822d444ba725d96bb4432cf8 [ 803.546889] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-11ca01d2-83e3-42c4-bef5-87459148e858" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.547310] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 803.547357] env[61594]: DEBUG nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 803.547504] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 803.644387] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.644987] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 6df61bcdc08d4e67a93bddf8f8a48142 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.657625] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6df61bcdc08d4e67a93bddf8f8a48142 [ 803.658630] env[61594]: DEBUG nova.network.neutron [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.659191] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg f3a0da28525e4c0c9961f945b7b920f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.674927] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3a0da28525e4c0c9961f945b7b920f7 [ 803.676258] env[61594]: INFO nova.compute.manager [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 11ca01d2-83e3-42c4-bef5-87459148e858] Took 0.13 seconds to deallocate network for instance. [ 803.678971] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 1f8841d94a5e4ad2a57850b844a23ad2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.762259] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f8841d94a5e4ad2a57850b844a23ad2 [ 803.771522] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg cc3b1efe27594475ae3bb112f99aaefb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.779110] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71c5f37-43f3-4850-876f-e31daa368599 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.792939] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Successfully created port: 99a82fe7-f10a-4919-91d9-fdde7ed0a044 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.795901] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fdc4b0-b258-4dea-8ab6-7fdb258ee406 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.833796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cc3b1efe27594475ae3bb112f99aaefb [ 803.835797] 
env[61594]: ERROR nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. [ 803.835797] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 803.835797] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.835797] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.835797] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.835797] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.835797] env[61594]: ERROR nova.compute.manager raise self.value [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.835797] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 803.835797] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.835797] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 803.836381] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.836381] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 803.836381] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. 
[ 803.836381] env[61594]: ERROR nova.compute.manager [ 803.836381] env[61594]: Traceback (most recent call last): [ 803.836381] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 803.836381] env[61594]: listener.cb(fileno) [ 803.836381] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 803.836381] env[61594]: result = function(*args, **kwargs) [ 803.836381] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.836381] env[61594]: return func(*args, **kwargs) [ 803.836381] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 803.836381] env[61594]: raise e [ 803.836381] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 803.836381] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 803.836381] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.836381] env[61594]: created_port_ids = self._update_ports_for_instance( [ 803.836381] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.836381] env[61594]: with excutils.save_and_reraise_exception(): [ 803.836381] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.836381] env[61594]: self.force_reraise() [ 803.836381] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.836381] env[61594]: raise self.value [ 803.836381] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.836381] env[61594]: updated_port = self._update_port( [ 803.836381] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.836381] env[61594]: _ensure_no_port_binding_failure(port) [ 803.836381] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.836381] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 803.837905] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. [ 803.837905] env[61594]: Removing descriptor: 21 [ 803.840116] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f31bf2a-b458-4380-b4d9-d0a3e507e11b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.844607] env[61594]: ERROR nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. 
[ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Traceback (most recent call last): [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] yield resources [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.driver.spawn(context, instance, image_meta, [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] vm_ref = self.build_virtual_machine(instance, [ 803.844607] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] for vif in network_info: [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self._sync_wrapper(fn, *args, **kwargs) [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.wait() [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self[:] = self._gt.wait() [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self._exit_event.wait() [ 803.844945] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.844945] env[61594]: ERROR 
nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] result = hub.switch() [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self.greenlet.switch() [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] result = function(*args, **kwargs) [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return func(*args, **kwargs) [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise e [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] nwinfo = self.network_api.allocate_for_instance( [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] created_port_ids = self._update_ports_for_instance( [ 803.845356] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] with excutils.save_and_reraise_exception(): [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.force_reraise() [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise self.value [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] updated_port = self._update_port( [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.845706] 
env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] _ensure_no_port_binding_failure(port) [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise exception.PortBindingFailed(port_id=port['id']) [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. [ 803.845706] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] [ 803.846059] env[61594]: INFO nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Terminating instance [ 803.847114] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.847683] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquired lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.847683] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.848181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg e9cdb1ce67d04c05b9efa16b414820dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.855319] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c16e65-f87f-4e79-a43e-33e673abeb11 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.860045] env[61594]: DEBUG nova.network.neutron [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.860771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] Expecting reply to msg 7183681a314648d0848dc8cd42870aaa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.862591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9cdb1ce67d04c05b9efa16b414820dc [ 
803.877885] env[61594]: DEBUG nova.compute.provider_tree [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.878657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg e2697bcf28734d6795646fb73b105a05 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.880076] env[61594]: INFO nova.scheduler.client.report [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Deleted allocations for instance 11ca01d2-83e3-42c4-bef5-87459148e858 [ 803.886279] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7183681a314648d0848dc8cd42870aaa [ 803.888277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 1fea0c21e8df4daf8cc52c4ac9c547a6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.888277] env[61594]: DEBUG oslo_concurrency.lockutils [req-0f6f0f2d-496e-4d3d-8834-0be80f04caf0 req-42b2e291-f6d8-4f26-9d8e-a77834f2e245 service nova] Releasing lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 803.903474] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2697bcf28734d6795646fb73b105a05 [ 803.905421] env[61594]: DEBUG nova.scheduler.client.report [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 803.908633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 78455ecaa65145faba267b21687e2e4f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.934015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1fea0c21e8df4daf8cc52c4ac9c547a6 [ 803.934015] env[61594]: DEBUG oslo_concurrency.lockutils [None req-dbcc0dcb-51d6-4649-a979-d110209f148b tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "11ca01d2-83e3-42c4-bef5-87459148e858" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.004s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.934015] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg d857222ad517435db906ae93eb3207bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.951388] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d857222ad517435db906ae93eb3207bb [ 803.952024] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 803.959032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg c1167e8c548a4544a6de06e44ff2eeff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.959032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78455ecaa65145faba267b21687e2e4f [ 803.959032] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.668s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.959032] env[61594]: ERROR nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. 
[ 803.959032] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Traceback (most recent call last): [ 803.959032] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 803.959032] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.driver.spawn(context, instance, image_meta, [ 803.959032] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] vm_ref = self.build_virtual_machine(instance, [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] for vif in network_info: [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self._sync_wrapper(fn, *args, **kwargs) [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.wait() [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.959492] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self[:] = self._gt.wait() [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self._exit_event.wait() [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] result = hub.switch() [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return self.greenlet.switch() [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] result = function(*args, **kwargs) [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] return func(*args, **kwargs) [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise e [ 803.959876] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] nwinfo = self.network_api.allocate_for_instance( [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] created_port_ids = self._update_ports_for_instance( [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] with excutils.save_and_reraise_exception(): [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] self.force_reraise() [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise self.value [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] updated_port = self._update_port( [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.960414] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] _ensure_no_port_binding_failure(port) [ 803.960833] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 803.960833] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] raise exception.PortBindingFailed(port_id=port['id']) [ 803.960833] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] nova.exception.PortBindingFailed: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. [ 803.960833] env[61594]: ERROR nova.compute.manager [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] [ 803.960833] env[61594]: DEBUG nova.compute.utils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 803.961663] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Build of instance 72c150cd-b1f2-451d-bb6b-6b8b668e97a1 was re-scheduled: Binding failed for port 1e3c1995-8599-4229-aaad-23c894ad768e, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 803.961879] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 803.962113] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.962178] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.962685] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.962758] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg c0c1455c7656465da2d58903fefb8142 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 803.974925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0c1455c7656465da2d58903fefb8142 [ 803.991771] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 
5ba94196571a4d0c83a761e79936b64a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.008847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1167e8c548a4544a6de06e44ff2eeff [ 804.015365] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ba94196571a4d0c83a761e79936b64a [ 804.015365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.032200] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.033710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.033988] env[61594]: INFO nova.compute.claims [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.035947] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 3ab2a67f04ac4e67b8275b9cfd0e8e25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.062456] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.086677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ab2a67f04ac4e67b8275b9cfd0e8e25 [ 804.088562] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 4aba093757c74e6590dfaa4f4b4330c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.099786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4aba093757c74e6590dfaa4f4b4330c5 [ 804.156192] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.293093] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323e975b-2134-4d59-90cb-d50b4c4f0974 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.302130] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650553bb-0635-48fb-bd60-66174ddef230 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.336847] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b08541-a234-4385-ae0c-9a80dd44477b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.347700] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ca047e-570e-4b3d-9459-5da294c82aae {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.364835] env[61594]: DEBUG nova.compute.provider_tree [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.365431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg ce3c506e1fca453ca295363980480296 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.383617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce3c506e1fca453ca295363980480296 [ 804.384852] env[61594]: DEBUG nova.scheduler.client.report [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 804.389257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg a1852109c72d4e3cb69139ac48ae7d72 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.409285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1852109c72d4e3cb69139ac48ae7d72 [ 804.410091] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.378s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.411218] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 804.413310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 0164b6dd77254029acdb09cf2c06db44 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.467596] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0164b6dd77254029acdb09cf2c06db44 [ 804.469545] env[61594]: DEBUG nova.compute.utils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 804.470364] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 851cd01e8b6f48519604d3b72c78a287 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.471717] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 804.471969] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 804.482309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 851cd01e8b6f48519604d3b72c78a287 [ 804.483060] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 804.484808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg dfe0c686a76c480385b7341c6fd38f7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.526841] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfe0c686a76c480385b7341c6fd38f7e [ 804.529988] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 76f8154d466849d28842d2b5a767c1eb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.574963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76f8154d466849d28842d2b5a767c1eb [ 804.576349] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 804.606824] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 804.607082] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 804.607256] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.607485] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 804.607640] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] 
Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.607788] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 804.608008] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 804.608469] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 804.608703] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 804.609801] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 804.609801] env[61594]: DEBUG nova.virt.hardware [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.609920] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67709cd0-19f4-4574-8f01-16570d7d1831 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.619853] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32c9e0a4-1be2-42f6-86b2-90beae950e20 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.691840] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.692601] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg b6cd93465ed348319dc7b938faef46b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.703880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response 
for msg b6cd93465ed348319dc7b938faef46b3 [ 804.704528] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-72c150cd-b1f2-451d-bb6b-6b8b668e97a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.704777] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 804.704922] env[61594]: DEBUG nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 804.705103] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.708060] env[61594]: DEBUG nova.policy [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0ec87e86b5114bde83f2ab525f42f271', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '54b079f5fde443dd9ac1e0f1666e46c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 804.725366] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.725865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 84f4c8b7b31543dc85270a25258c5f7b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.734468] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84f4c8b7b31543dc85270a25258c5f7b [ 804.735102] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Releasing lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.735475] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 
tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 804.735618] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 804.736127] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fede147-cf1b-4838-ab30-529473b22282 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.746689] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a261a3c-b4ad-4d4d-b0fc-3ac573058b69 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.772180] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance af3ae6a1-44e4-49f7-905c-1480ec3ad0a9 could not be found. [ 804.772496] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.772674] env[61594]: INFO nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 804.772940] env[61594]: DEBUG oslo.service.loopingcall [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.773454] env[61594]: DEBUG nova.compute.manager [-] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 804.773559] env[61594]: DEBUG nova.network.neutron [-] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.779824] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.780414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 65ecf46c930d41a5840f35a24842498b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.788629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 65ecf46c930d41a5840f35a24842498b [ 804.789134] env[61594]: DEBUG nova.network.neutron [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.789633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg afc6749dba944074a8159f04e23190cf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.797112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afc6749dba944074a8159f04e23190cf [ 804.797681] env[61594]: INFO nova.compute.manager [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 72c150cd-b1f2-451d-bb6b-6b8b668e97a1] Took 0.09 seconds to deallocate network for instance. [ 804.801202] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 7c7c2eae413b4f658b4826ffa9e9008d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.853859] env[61594]: DEBUG nova.network.neutron [-] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.854407] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 361421576d2e43cbbf94e4d53ee40ed2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.855522] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c7c2eae413b4f658b4826ffa9e9008d [ 804.864780] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 37c5cc74e688444ba7a9f7c1d4bfc7e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.865950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 361421576d2e43cbbf94e4d53ee40ed2 [ 804.866706] env[61594]: DEBUG nova.network.neutron [-] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.870255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 22bb6bbe24234e048d334c7966c5acfe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.883607] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22bb6bbe24234e048d334c7966c5acfe [ 804.884166] env[61594]: INFO nova.compute.manager [-] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Took 0.11 seconds to deallocate network for instance. [ 804.886374] env[61594]: DEBUG nova.compute.claims [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 804.886543] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 804.886751] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 804.888851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 971926ad1605441c8a72720bead263da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.920872] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37c5cc74e688444ba7a9f7c1d4bfc7e2 [ 804.949330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 971926ad1605441c8a72720bead263da [ 804.959939] env[61594]: INFO nova.scheduler.client.report [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Deleted allocations for instance 
72c150cd-b1f2-451d-bb6b-6b8b668e97a1 [ 804.970646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 85fa71c29d49424d914482a9e62827f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 804.989252] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85fa71c29d49424d914482a9e62827f9 [ 804.989876] env[61594]: DEBUG oslo_concurrency.lockutils [None req-edefb713-b593-4bfd-8772-3c7cb2678348 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "72c150cd-b1f2-451d-bb6b-6b8b668e97a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.756s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 804.990751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg cf0989ae3244422890b11d07eca6b838 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.030619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf0989ae3244422890b11d07eca6b838 [ 805.037800] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 805.039915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg f185d90b8313417ab2cb9784aa7e6c3f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.081949] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f185d90b8313417ab2cb9784aa7e6c3f [ 805.100675] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 805.149614] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b77ee3-b1df-49ee-ab3f-f90b9349debe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.159777] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c3e936-6320-4801-800d-c3436a37ca33 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.193432] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2205ecc-1064-40f2-adb5-c2ed46e5849a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.202913] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bb4d1e91-9be1-4c5b-8a82-302dc615fcfe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.219338] env[61594]: DEBUG nova.compute.provider_tree [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.219338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg b9b49c3300b04eefab880ed2f47401e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.229648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b9b49c3300b04eefab880ed2f47401e9 [ 805.230721] env[61594]: DEBUG nova.scheduler.client.report [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 805.233118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 5de2490ac2ae4ed0a013a5e66ad05386 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.247165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5de2490ac2ae4ed0a013a5e66ad05386 [ 805.248009] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.361s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.248584] env[61594]: ERROR nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. 
[ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Traceback (most recent call last): [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.driver.spawn(context, instance, image_meta, [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] vm_ref = self.build_virtual_machine(instance, [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] vif_infos = vmwarevif.get_vif_info(self._session, [ 805.248584] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] for vif in network_info: [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self._sync_wrapper(fn, *args, **kwargs) [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.wait() [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self[:] = self._gt.wait() [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self._exit_event.wait() [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] result = hub.switch() [ 805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
805.248905] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return self.greenlet.switch() [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] result = function(*args, **kwargs) [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] return func(*args, **kwargs) [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise e [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] nwinfo = self.network_api.allocate_for_instance( [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] created_port_ids = self._update_ports_for_instance( [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] with excutils.save_and_reraise_exception(): [ 805.249268] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] self.force_reraise() [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise self.value [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] updated_port = self._update_port( [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] _ensure_no_port_binding_failure(port) [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] raise exception.PortBindingFailed(port_id=port['id']) [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] nova.exception.PortBindingFailed: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. [ 805.249688] env[61594]: ERROR nova.compute.manager [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] [ 805.250041] env[61594]: DEBUG nova.compute.utils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 805.250398] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.150s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.252088] env[61594]: INFO nova.compute.claims [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.253846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg d9512a37cb49446dab7f1940004a75fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.255070] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Build of instance af3ae6a1-44e4-49f7-905c-1480ec3ad0a9 was re-scheduled: Binding failed for port 53214105-fc54-4a7d-ac37-7cb3c832497a, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 805.255696] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 805.255798] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquiring lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 805.256118] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Acquired lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.256118] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.256610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 103d9eeae23b4022b886d62f8ce7eab1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.266496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 103d9eeae23b4022b886d62f8ce7eab1 [ 805.294591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d9512a37cb49446dab7f1940004a75fe [ 805.296855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 54f04ccce0b04392bb914977879ffe56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.309690] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54f04ccce0b04392bb914977879ffe56 [ 805.355572] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.491070] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1279aea1-99ee-43f2-815b-11e2f6470955 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.500381] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccaff71-8ac7-4cfd-b1c5-94d6826c295a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.534923] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1305d581-01e4-4401-97e7-3ee9bb32c616 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.539453] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.543573] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6fa58b-b282-4d44-afea-0f2527fb24e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.557977] env[61594]: DEBUG nova.compute.provider_tree [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.558524] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 51de0e7b5f9f4bfe916761f5d4f36b15 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.566937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51de0e7b5f9f4bfe916761f5d4f36b15 [ 805.567919] env[61594]: DEBUG nova.scheduler.client.report [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 805.570451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 900b026d85984d60b419ebeb82bc8035 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.584716] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 900b026d85984d60b419ebeb82bc8035 [ 805.585683] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.335s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.586278] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 805.587967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg dfaeacebd9a24f76a2701f215b67416d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.639114] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfaeacebd9a24f76a2701f215b67416d [ 805.640745] env[61594]: DEBUG nova.compute.utils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.641535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 4b094c7df67941308a428e0943eb0fe5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.643230] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 805.643409] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 805.656121] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b094c7df67941308a428e0943eb0fe5 [ 805.656121] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 805.657853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 35f13131495f44d5bfe63d86c79f7c10 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.694149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35f13131495f44d5bfe63d86c79f7c10 [ 805.697158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg b918cf256b3e4a4aa0b2d16140e31c2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 805.729246] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b918cf256b3e4a4aa0b2d16140e31c2f [ 805.730562] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 805.757536] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.757813] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.757973] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.758170] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.758320] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Image pref 0:0:0 {{(pid=61594) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.758470] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.758676] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.758833] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.758998] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.759177] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.759385] env[61594]: DEBUG nova.virt.hardware [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.760465] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b10d00-461c-4758-934a-e4b5d5671434 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.769704] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c814af6-921d-4189-ac4f-1c22c4f67c7f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.950317] env[61594]: DEBUG nova.policy [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcf95f76d0f54cbebde1301f62b44bcd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7300699b4cb49de89d096e3a1ac1778', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 806.244884] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 
tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.245466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg a13cf80f480046df8a2e0f53820731c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.257649] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a13cf80f480046df8a2e0f53820731c0 [ 806.258332] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Releasing lock "refresh_cache-af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.258541] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 806.258716] env[61594]: DEBUG nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 806.258884] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.281125] env[61594]: ERROR nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. 
[ 806.281125] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 806.281125] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 806.281125] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 806.281125] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.281125] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.281125] env[61594]: ERROR nova.compute.manager raise self.value [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 806.281125] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 806.281125] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.281125] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 806.281871] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.281871] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 806.281871] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. 
[ 806.281871] env[61594]: ERROR nova.compute.manager [ 806.281871] env[61594]: Traceback (most recent call last): [ 806.281871] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 806.281871] env[61594]: listener.cb(fileno) [ 806.281871] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 806.281871] env[61594]: result = function(*args, **kwargs) [ 806.281871] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 806.281871] env[61594]: return func(*args, **kwargs) [ 806.281871] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 806.281871] env[61594]: raise e [ 806.281871] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 806.281871] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 806.281871] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 806.281871] env[61594]: created_port_ids = self._update_ports_for_instance( [ 806.281871] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 806.281871] env[61594]: with excutils.save_and_reraise_exception(): [ 806.281871] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.281871] env[61594]: self.force_reraise() [ 806.281871] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.281871] env[61594]: raise self.value [ 806.281871] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 806.281871] env[61594]: updated_port = self._update_port( [ 806.281871] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.281871] env[61594]: _ensure_no_port_binding_failure(port) [ 806.281871] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.281871] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 806.282949] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. [ 806.282949] env[61594]: Removing descriptor: 22 [ 806.282949] env[61594]: ERROR nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. 
[ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Traceback (most recent call last): [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] yield resources [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.driver.spawn(context, instance, image_meta, [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 806.282949] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] vm_ref = self.build_virtual_machine(instance, [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] vif_infos = vmwarevif.get_vif_info(self._session, [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] for vif in network_info: [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self._sync_wrapper(fn, *args, **kwargs) [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.wait() [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self[:] = self._gt.wait() [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self._exit_event.wait() [ 806.283739] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 806.284285] env[61594]: ERROR 
nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] result = hub.switch() [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self.greenlet.switch() [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] result = function(*args, **kwargs) [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return func(*args, **kwargs) [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise e [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] nwinfo = self.network_api.allocate_for_instance( [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 806.284285] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] created_port_ids = self._update_ports_for_instance( [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] with excutils.save_and_reraise_exception(): [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.force_reraise() [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise self.value [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] updated_port = self._update_port( [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 806.284773] 
env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] _ensure_no_port_binding_failure(port) [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 806.284773] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise exception.PortBindingFailed(port_id=port['id']) [ 806.285209] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. [ 806.285209] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] [ 806.285209] env[61594]: INFO nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Terminating instance [ 806.289705] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquiring lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 806.289768] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquired lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.289973] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 806.293493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 68a094f716a944ad8084633137519452 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.302023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 68a094f716a944ad8084633137519452 [ 806.342019] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.342019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg fe5b0b711ab642ea9d3c380a663910d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.349293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fe5b0b711ab642ea9d3c380a663910d5 [ 806.349293] env[61594]: DEBUG nova.network.neutron [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.349293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 05f252f7657b40ad9be4f1f62dfa9ca5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.368216] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 05f252f7657b40ad9be4f1f62dfa9ca5 [ 806.368935] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.373191] env[61594]: INFO nova.compute.manager [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] [instance: af3ae6a1-44e4-49f7-905c-1480ec3ad0a9] Took 0.11 seconds to deallocate network for instance. 
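Editor's note: the tracebacks above repeatedly end in _ensure_no_port_binding_failure raising PortBindingFailed from inside a save_and_reraise_exception() block, which is why force_reraise() and "raise self.value" frames appear in every dump. Below is a minimal sketch of that pattern; the port-dict layout, the 'binding_failed' vif_type value, and the stand-in exception class are assumptions inferred from the log rather than a verbatim copy of the Nova code.

    # Minimal sketch of the failure path seen in the tracebacks above.
    # The port dict layout and the 'binding_failed' vif_type value are
    # assumptions inferred from the log, not a copy of Nova's implementation.
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        # Stand-in for nova.exception.PortBindingFailed.
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)


    def _ensure_no_port_binding_failure(port):
        # Neutron reports a failed binding through the port's
        # binding:vif_type attribute; a 'binding_failed' value means the
        # port could not be bound on the target host.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_ports_for_instance(ports):
        created = []
        for port in ports:
            # save_and_reraise_exception() logs any exception raised in the
            # with-block and re-raises it on exit, which produces the
            # __exit__ / force_reraise() / raise self.value frames above.
            with excutils.save_and_reraise_exception():
                _ensure_no_port_binding_failure(port)
                created.append(port['id'])
        return created

Calling _update_ports_for_instance() with a port whose binding:vif_type is 'binding_failed' reproduces the PortBindingFailed message quoted in the log for port 42447106-00fb-4146-a1a8-05ce6aa104cc.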
[ 806.375698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg 296eaa5ad3fd4510acc03dda96a2d8ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.420923] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 296eaa5ad3fd4510acc03dda96a2d8ed [ 806.426045] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg fdee231a1c394426ac7c21d9c8581fa4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.480344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdee231a1c394426ac7c21d9c8581fa4 [ 806.516935] env[61594]: INFO nova.scheduler.client.report [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Deleted allocations for instance af3ae6a1-44e4-49f7-905c-1480ec3ad0a9 [ 806.527232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Expecting reply to msg c6ce2f6d645144389144d1a87b273a6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.536333] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Successfully created port: e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.551581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6ce2f6d645144389144d1a87b273a6e [ 806.552674] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4ed7fef4-ba15-454f-8f48-35763a9c12f7 tempest-SecurityGroupsTestJSON-458726018 tempest-SecurityGroupsTestJSON-458726018-project-member] Lock "af3ae6a1-44e4-49f7-905c-1480ec3ad0a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.252s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 806.762914] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.763392] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg f5c779f5828248c89394c56909a07cb1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 806.773745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5c779f5828248c89394c56909a07cb1 [ 806.774388] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 
tempest-ServerActionsV293TestJSON-1369336445-project-member] Releasing lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 806.774926] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 806.775803] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0242236-441e-4864-81d5-def64cf71004 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.788311] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e045e9-aad9-4ef3-8f8e-c3947afac92c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.814472] env[61594]: WARNING nova.virt.vmwareapi.driver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 48b27adc-0bc7-44dd-9330-d0b30593f95f could not be found. [ 806.814724] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 806.818350] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bcea14bc-7d3e-461f-8765-6b76d587a193 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.830674] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc22de2-231f-438f-876e-2f752a4e2518 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.865997] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 48b27adc-0bc7-44dd-9330-d0b30593f95f could not be found. [ 806.866271] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 806.866501] env[61594]: INFO nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Took 0.09 seconds to destroy the instance on the hypervisor. 
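Editor's note: the paired "Acquiring lock" / "Acquired lock ... waited" / "Releasing lock" / "held N.NNNs" DEBUG lines in this section (for refresh_cache-<instance uuid> and, further down, compute_resources) come from oslo_concurrency.lockutils. A short sketch of the two usual ways those locks are taken; the lock names are copied from the log, while the functions themselves are illustrative placeholders, not Nova's real call graph.

    # Sketch of the oslo_concurrency locking pattern behind the
    # "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines.
    from oslo_concurrency import lockutils


    def refresh_instance_cache(instance_uuid):
        # Context-manager form: logs acquire/release around the body and
        # serializes network-info cache refreshes per instance.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here


    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Decorator form: the same named lock protects the resource
        # tracker, so only one claim or abort mutates compute_resources
        # at a time (hence the "waited"/"held" timings in the log).
        pass  # claim CPU/RAM/disk for the instance here

The refresh_cache lines in the log correspond to the context-manager form (lockutils.py:310/313/331), while the compute_resources lines correspond to the decorator's inner wrapper (lockutils.py:402/407/421).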
[ 806.866781] env[61594]: DEBUG oslo.service.loopingcall [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 806.867112] env[61594]: DEBUG nova.compute.manager [-] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 806.867112] env[61594]: DEBUG nova.network.neutron [-] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 806.994012] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.994313] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 806.995178] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6eec2eba761f45dd8b3dde08deae0bb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.008467] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6eec2eba761f45dd8b3dde08deae0bb4 [ 807.009017] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 807.011853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 395a6a28bf6e4fcabeaaad624bece799 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.052769] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 395a6a28bf6e4fcabeaaad624bece799 [ 807.074737] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.074737] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.076422] env[61594]: INFO nova.compute.claims [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.078817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ea30156bfeb54914b429beae609a6eaf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.119197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ea30156bfeb54914b429beae609a6eaf [ 807.121591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ae68f53d1a5a49d6b270ddab1f44dfac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.129200] env[61594]: DEBUG nova.network.neutron [-] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.129705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 94b6b8e3e4b042d5b8037a986099f11d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.131602] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae68f53d1a5a49d6b270ddab1f44dfac [ 807.145879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94b6b8e3e4b042d5b8037a986099f11d [ 807.145879] env[61594]: DEBUG nova.network.neutron [-] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.145879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 688d982670814e77beef46bddddca876 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.156055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 688d982670814e77beef46bddddca876 [ 807.156556] env[61594]: INFO nova.compute.manager [-] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Took 0.29 seconds to deallocate network for instance. [ 807.259524] env[61594]: INFO nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Took 0.10 seconds to detach 1 volumes for instance. [ 807.261912] env[61594]: DEBUG nova.compute.claims [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 807.261912] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.307945] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dbf076-f3b9-4f4a-af99-8d923337f0d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.318045] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa26ffa-e432-433f-a756-0bf044e87b95 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.322738] env[61594]: DEBUG nova.compute.manager [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Received event network-changed-42447106-00fb-4146-a1a8-05ce6aa104cc {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 807.322966] env[61594]: DEBUG nova.compute.manager [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Refreshing instance network info cache due to event network-changed-42447106-00fb-4146-a1a8-05ce6aa104cc. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 807.323243] env[61594]: DEBUG oslo_concurrency.lockutils [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] Acquiring lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.323444] env[61594]: DEBUG oslo_concurrency.lockutils [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] Acquired lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.323662] env[61594]: DEBUG nova.network.neutron [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Refreshing network info cache for port 42447106-00fb-4146-a1a8-05ce6aa104cc {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 807.324117] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] Expecting reply to msg 5738cb386c954c959f20dc0457b8984c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.356773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5738cb386c954c959f20dc0457b8984c [ 807.358474] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca058fb-8eca-456c-a76c-53cd0d28a774 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.371585] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c267698-5bb5-4c6f-810c-020b23bf2bf7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.390749] env[61594]: DEBUG nova.compute.provider_tree [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.391518] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c5cf81243b3145f99fdbbee095e40c9d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.402165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5cf81243b3145f99fdbbee095e40c9d [ 807.403619] env[61594]: DEBUG nova.scheduler.client.report [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 807.406744] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a9c39a05d47248e48af837cbaf8f346c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.424488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9c39a05d47248e48af837cbaf8f346c [ 807.425480] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.351s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.425866] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 807.427835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg e6f68b238bca462eaf970edccfedfe80 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.428903] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.167s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.431773] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg b4d50e6482d34081be0b156402e4f7f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.445894] env[61594]: DEBUG nova.network.neutron [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.483212] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6f68b238bca462eaf970edccfedfe80 [ 807.484866] env[61594]: DEBUG nova.compute.utils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 807.485775] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6fbcbbabaa184d2a9327bc55f1c9c3da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.490026] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 807.490026] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 807.492227] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4d50e6482d34081be0b156402e4f7f4 [ 807.496917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fbcbbabaa184d2a9327bc55f1c9c3da [ 807.497429] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 807.499834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg b32e971d7d294f18bc8a33b84f4eb510 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.539875] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.540623] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg a9fa1ac10278490b88cdc89314d2a593 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.545021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b32e971d7d294f18bc8a33b84f4eb510 [ 807.545421] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d6a2fe9bf1d54f13ae916ee698d5e24f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.566907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a9fa1ac10278490b88cdc89314d2a593 [ 807.581067] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.581796] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.581977] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 807.583045] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d6a2fe9bf1d54f13ae916ee698d5e24f [ 807.585738] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 807.615893] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.616151] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.616312] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.616700] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.616700] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.616876] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.617182] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 807.617368] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.617540] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.617704] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.617876] env[61594]: DEBUG nova.virt.hardware [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.619061] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b020b5d-6130-48c6-a5c3-71b45b11e2e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.631239] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01f90573-e005-4041-b507-8a30de41de6f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.689392] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bdcd4c-7041-4582-ab7e-4af22ec08926 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.698416] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de7e274-36ae-4fdd-9444-4ab57c291bd6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.733125] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25992aa4-eccf-43da-8ec7-f491bb987e86 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.742445] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3d96811-b919-46c1-ade7-bdcf6caffc69 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.759153] env[61594]: DEBUG nova.compute.provider_tree [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 807.759774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 8933cd66b6fb4d92bdae5359d4b9cc78 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.779170] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8933cd66b6fb4d92bdae5359d4b9cc78 [ 807.780286] env[61594]: DEBUG nova.scheduler.client.report [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Inventory has 
not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 807.782817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 0e81a897a3d242508aad37946e6ef6a6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 807.809021] env[61594]: DEBUG nova.policy [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 807.810507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e81a897a3d242508aad37946e6ef6a6 [ 807.812092] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.383s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.813050] env[61594]: ERROR nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. 
[ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Traceback (most recent call last): [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.driver.spawn(context, instance, image_meta, [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] vm_ref = self.build_virtual_machine(instance, [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.813050] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] for vif in network_info: [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self._sync_wrapper(fn, *args, **kwargs) [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.wait() [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self[:] = self._gt.wait() [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self._exit_event.wait() [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] result = hub.switch() [ 807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
807.813547] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return self.greenlet.switch() [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] result = function(*args, **kwargs) [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] return func(*args, **kwargs) [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise e [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] nwinfo = self.network_api.allocate_for_instance( [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] created_port_ids = self._update_ports_for_instance( [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] with excutils.save_and_reraise_exception(): [ 807.815286] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] self.force_reraise() [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise self.value [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] updated_port = self._update_port( [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] _ensure_no_port_binding_failure(port) [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] raise exception.PortBindingFailed(port_id=port['id']) [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] nova.exception.PortBindingFailed: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. [ 807.815722] env[61594]: ERROR nova.compute.manager [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] [ 807.816084] env[61594]: DEBUG nova.compute.utils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 807.819289] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Build of instance 48b27adc-0bc7-44dd-9330-d0b30593f95f was re-scheduled: Binding failed for port 42447106-00fb-4146-a1a8-05ce6aa104cc, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 807.819289] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 807.819289] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquiring lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.175945] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Successfully created port: f32cbb9a-7422-4f1e-b576-b28aa8cc4371 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.270542] env[61594]: DEBUG nova.network.neutron [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.271098] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] Expecting reply to msg da2a6af09f624b6394f4d7c27f5333a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.278355] env[61594]: ERROR nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Instance failed network setup after 1 attempt(s): 
nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. [ 808.278355] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 808.278355] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 808.278355] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 808.278355] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.278355] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.278355] env[61594]: ERROR nova.compute.manager raise self.value [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 808.278355] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 808.278355] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.278355] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 808.278882] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.278882] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 808.278882] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. 
[ 808.278882] env[61594]: ERROR nova.compute.manager [ 808.278882] env[61594]: Traceback (most recent call last): [ 808.278882] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 808.278882] env[61594]: listener.cb(fileno) [ 808.278882] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 808.278882] env[61594]: result = function(*args, **kwargs) [ 808.278882] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 808.278882] env[61594]: return func(*args, **kwargs) [ 808.278882] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 808.278882] env[61594]: raise e [ 808.278882] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 808.278882] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 808.278882] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 808.278882] env[61594]: created_port_ids = self._update_ports_for_instance( [ 808.278882] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 808.278882] env[61594]: with excutils.save_and_reraise_exception(): [ 808.278882] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.278882] env[61594]: self.force_reraise() [ 808.278882] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.278882] env[61594]: raise self.value [ 808.278882] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 808.278882] env[61594]: updated_port = self._update_port( [ 808.278882] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.278882] env[61594]: _ensure_no_port_binding_failure(port) [ 808.278882] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.278882] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 808.279746] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. [ 808.279746] env[61594]: Removing descriptor: 19 [ 808.279746] env[61594]: ERROR nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. 
[ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Traceback (most recent call last): [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] yield resources [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.driver.spawn(context, instance, image_meta, [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 808.279746] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] vm_ref = self.build_virtual_machine(instance, [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] for vif in network_info: [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return self._sync_wrapper(fn, *args, **kwargs) [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.wait() [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self[:] = self._gt.wait() [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return self._exit_event.wait() [ 808.280108] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 808.281149] env[61594]: ERROR 
nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] result = hub.switch() [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return self.greenlet.switch() [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] result = function(*args, **kwargs) [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return func(*args, **kwargs) [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise e [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] nwinfo = self.network_api.allocate_for_instance( [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 808.281149] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] created_port_ids = self._update_ports_for_instance( [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] with excutils.save_and_reraise_exception(): [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.force_reraise() [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise self.value [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] updated_port = self._update_port( [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 808.282563] 
env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] _ensure_no_port_binding_failure(port) [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 808.282563] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise exception.PortBindingFailed(port_id=port['id']) [ 808.283041] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. [ 808.283041] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] [ 808.283041] env[61594]: INFO nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Terminating instance [ 808.283041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da2a6af09f624b6394f4d7c27f5333a2 [ 808.283172] env[61594]: DEBUG oslo_concurrency.lockutils [req-b70c3e0b-5265-4c0c-a4ca-5520f79afa13 req-b68491c3-db3b-4729-be2f-5004b53a1a0f service nova] Releasing lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.283686] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquiring lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 808.283834] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquired lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.283995] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.284448] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 1920acfb043248fa88f3025a74b197a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.286075] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Acquired lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.286075] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc 
tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.286075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 18f27289d2bb42ba87ab002d25bc8057 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.294776] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1920acfb043248fa88f3025a74b197a1 [ 808.298052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18f27289d2bb42ba87ab002d25bc8057 [ 808.374105] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.389689] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.543520] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.543707] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 808.543833] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 808.545033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 622864d8f1944530aae2214c317f2806 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.570988] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 622864d8f1944530aae2214c317f2806 [ 808.573366] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.573587] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.573725] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.573855] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.573978] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.574180] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.574388] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.574468] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.574540] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 808.574656] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 808.575201] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 808.575862] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg c398ad42d6a94dc9a561c1639de14a3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.590012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c398ad42d6a94dc9a561c1639de14a3a [ 808.591055] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.591282] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.591454] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 808.591624] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 808.592695] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f694961-0406-4e3e-9bf9-851cdb4095db {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.603745] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab686b43-d083-4e3e-990f-a844ce2711ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.622905] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47143b3c-3f57-41f4-a5a3-d0bd5a3f38a3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.631337] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72eff22-dbfa-4bb7-943e-a13ba52552e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.669677] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181491MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 808.669677] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.669677] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.670072] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 1782d730f25b473bb398b0dfade0886a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.723186] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1782d730f25b473bb398b0dfade0886a [ 808.727335] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 32bcefcc347549ef9144ee20bda67b56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.742711] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32bcefcc347549ef9144ee20bda67b56 [ 808.771847] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 07d2f1e7-c08e-434c-aea7-941ef75f16ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.771945] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 28bcec42-4fb0-4ef1-b882-6224fdbcec16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.773605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg a37323f7a4be48f1ad5f31b5f3ce1d47 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.788390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a37323f7a4be48f1ad5f31b5f3ce1d47 [ 808.790853] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 48b27adc-0bc7-44dd-9330-d0b30593f95f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.790853] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 93ecfc04-23c5-41fc-babf-1a5bd02769b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.790853] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.790853] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance e600280c-2414-420d-bc8d-6e3e7979fccf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.791112] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 4ad1a310-5786-4bb6-87ff-72069f692eff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.791112] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 500259b0-a57d-43ff-9c88-46190b6a3a10 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.791112] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance f7d0db3d-9247-409a-bf1e-4b53c2368ddc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.791112] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 808.791256] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 808.791256] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 808.826074] env[61594]: WARNING oslo_vmware.rw_handles [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 808.826074] env[61594]: ERROR oslo_vmware.rw_handles [ 808.826808] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 808.829372] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 808.829471] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Copying Virtual Disk [datastore1] vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] 
vmware_temp/d82bc074-d91e-486c-8bfd-7ca8887cd517/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 808.830176] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e813943e-2a44-4f19-ae64-580789d96061 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.833219] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.833720] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg e73356ad494c457e8b4c67fe1f45d437 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.846207] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for the task: (returnval){ [ 808.846207] env[61594]: value = "task-1291403" [ 808.846207] env[61594]: _type = "Task" [ 808.846207] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.847354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e73356ad494c457e8b4c67fe1f45d437 [ 808.848015] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Releasing lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.848490] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 808.848654] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 808.852950] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8092a233-d558-46c2-b536-75d059ed1a85 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.865351] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Task: {'id': task-1291403, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.870913] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cae4061-2480-4ce7-b2d4-40a1c052fa46 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.900018] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 93ecfc04-23c5-41fc-babf-1a5bd02769b8 could not be found. [ 808.900609] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 808.900609] env[61594]: INFO nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 808.900754] env[61594]: DEBUG oslo.service.loopingcall [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 808.903879] env[61594]: DEBUG nova.compute.manager [-] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 808.903949] env[61594]: DEBUG nova.network.neutron [-] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 808.950057] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.950057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 5896fe9a48414022a466e40f10b585fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.963272] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5896fe9a48414022a466e40f10b585fc [ 808.963272] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Releasing lock "refresh_cache-48b27adc-0bc7-44dd-9330-d0b30593f95f" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.963272] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 808.963272] env[61594]: DEBUG nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 808.963272] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 808.964185] env[61594]: DEBUG nova.network.neutron [-] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 808.965217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e04a742de889466886b541649b076ada in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.977664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e04a742de889466886b541649b076ada [ 808.977664] env[61594]: DEBUG nova.network.neutron [-] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.977664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e19e865a6ee14fed96fef882acc86c5e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 808.985324] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e19e865a6ee14fed96fef882acc86c5e [ 808.986111] env[61594]: INFO nova.compute.manager [-] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Took 0.08 seconds to deallocate network for instance. [ 808.990822] env[61594]: DEBUG nova.compute.claims [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 808.991180] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 809.001128] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c218d4a3-8ed3-48a2-a276-54f12e0957f3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.009397] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a716d0c2-4321-4a84-935c-03c9b81fec20 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.838552] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 809.839151] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg b5655f45ba874cd9a99c87eb3b7755c8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.846364] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ae9b72-4221-409d-b3b2-56ebda9c502b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.849362] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5655f45ba874cd9a99c87eb3b7755c8 [ 809.849974] env[61594]: DEBUG nova.network.neutron [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.850471] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 7b328da7cca747fc8b7f66feb796413a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.860402] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7b328da7cca747fc8b7f66feb796413a [ 809.860824] env[61594]: DEBUG oslo_vmware.exceptions [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 809.862085] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f82261-5cc0-4d07-a49a-3784bc16a04b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.866382] env[61594]: INFO nova.compute.manager [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] [instance: 48b27adc-0bc7-44dd-9330-d0b30593f95f] Took 0.90 seconds to deallocate network for instance. 
[ 809.868154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 77674803762c4a0ea773069e617daa49 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.869248] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.869810] env[61594]: ERROR nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 809.869810] env[61594]: Faults: ['InvalidArgument'] [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Traceback (most recent call last): [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] yield resources [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self.driver.spawn(context, instance, image_meta, [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self._fetch_image_if_missing(context, vi) [ 809.869810] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] image_cache(vi, tmp_image_ds_loc) [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] vm_util.copy_virtual_disk( [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] 
session._wait_for_task(vmdk_copy_task) [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] return self.wait_for_task(task_ref) [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] return evt.wait() [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] result = hub.switch() [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 809.870488] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] return self.greenlet.switch() [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self.f(*self.args, **self.kw) [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] raise exceptions.translate_fault(task_info.error) [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Faults: ['InvalidArgument'] [ 809.871142] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] [ 809.871142] env[61594]: INFO nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Terminating instance [ 809.872208] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.872208] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.872943] env[61594]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a0de55d-f67e-4295-b619-468d80b1c6b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.875899] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.876080] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.876251] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 809.876664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 1aecafee1dcf4f358ea5904d3d3d77a6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.885511] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1aecafee1dcf4f358ea5904d3d3d77a6 [ 809.886117] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.886576] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 60e4893014ab4255b9206d92ce541956 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.897731] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.898055] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 809.899033] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf0acba-3320-43b5-8a0a-3e30380bf6a6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.905069] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for the task: (returnval){ [ 809.905069] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]528e46de-62e2-8108-155e-d3296b6f4600" [ 809.905069] env[61594]: _type = "Task" [ 809.905069] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.909327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60e4893014ab4255b9206d92ce541956 [ 809.910143] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 809.913735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 97bc4bf8e59a4017b0f0bfdb2b850871 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.921680] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 809.921680] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Creating directory with path [datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 809.921947] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77674803762c4a0ea773069e617daa49 [ 809.922293] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb35b192-63b2-4b00-991a-5eeb96db9409 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.927083] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 6188534e93ca486aa9fa06826bd90e44 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.928183] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97bc4bf8e59a4017b0f0bfdb2b850871 [ 809.928724] env[61594]: DEBUG 
nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 809.928904] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.260s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.930382] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.939s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.931612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 187a5ee623984f5280a5321bd5f8cf30 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 809.951152] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Created directory with path [datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 809.951528] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Fetch image to [datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 809.951705] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 809.952448] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83c57f6-1c55-44c6-b581-d6062c12d872 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.960553] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e5b6cc-bce1-4059-9d47-834fd2164642 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.969998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6188534e93ca486aa9fa06826bd90e44 [ 809.971624] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e8092c-d5ad-458a-8d81-fe2f42a01b93 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.978497] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 187a5ee623984f5280a5321bd5f8cf30 [ 810.015300] env[61594]: INFO nova.scheduler.client.report [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Deleted allocations for instance 48b27adc-0bc7-44dd-9330-d0b30593f95f [ 810.021713] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829d2fbb-bd09-4abe-93f9-3f3db39af0d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.024898] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Expecting reply to msg 857df3509ae04ea9a3e784963da56dd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.031500] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2d5bbf0d-84f2-471a-8a01-405fb9ed6baf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.042120] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 857df3509ae04ea9a3e784963da56dd1 [ 810.042769] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28ebfe0d-c051-4671-adaf-dd6d921582fc tempest-ServerActionsV293TestJSON-1369336445 tempest-ServerActionsV293TestJSON-1369336445-project-member] Lock "48b27adc-0bc7-44dd-9330-d0b30593f95f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.751s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.063098] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 810.084703] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.151367] env[61594]: DEBUG oslo_vmware.rw_handles [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 810.219646] env[61594]: DEBUG oslo_vmware.rw_handles [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 810.219646] env[61594]: DEBUG oslo_vmware.rw_handles [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 810.249106] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b44cc3-a27d-403c-83d8-40c21e48987f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.257476] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c2e47f-81fd-477d-9e0b-24807c897234 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.291705] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f2b6c5-a1ac-4849-93e2-3bfbfc7a420b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.300209] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05baf3e8-d935-4929-8fc2-b57170e9ac49 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.316267] env[61594]: DEBUG nova.compute.provider_tree [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.317142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 6bff6e13e6364d08a27f548544604762 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.325914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bff6e13e6364d08a27f548544604762 [ 810.327031] env[61594]: DEBUG nova.scheduler.client.report [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 810.329329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 1cafc2452eab41939d97726f7d7fa4dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.346946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1cafc2452eab41939d97726f7d7fa4dd [ 810.347793] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.418s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.348453] env[61594]: ERROR nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Traceback (most recent call last): [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.driver.spawn(context, instance, image_meta, [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] vm_ref = self.build_virtual_machine(instance, [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.348453] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] for vif in network_info: [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return 
self._sync_wrapper(fn, *args, **kwargs) [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.wait() [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self[:] = self._gt.wait() [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return self._exit_event.wait() [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] result = hub.switch() [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 810.349038] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return self.greenlet.switch() [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] result = function(*args, **kwargs) [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] return func(*args, **kwargs) [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise e [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] nwinfo = self.network_api.allocate_for_instance( [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] created_port_ids = self._update_ports_for_instance( [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] with 
excutils.save_and_reraise_exception(): [ 810.349618] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] self.force_reraise() [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise self.value [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] updated_port = self._update_port( [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] _ensure_no_port_binding_failure(port) [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] raise exception.PortBindingFailed(port_id=port['id']) [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] nova.exception.PortBindingFailed: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. [ 810.350200] env[61594]: ERROR nova.compute.manager [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] [ 810.350938] env[61594]: DEBUG nova.compute.utils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 810.354033] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Build of instance 93ecfc04-23c5-41fc-babf-1a5bd02769b8 was re-scheduled: Binding failed for port 8b090fba-fb82-4365-b99c-e7d58baf8a68, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 810.354482] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 810.354723] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquiring lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.355202] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Acquired lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.355202] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.355457] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg a02a41e866a24638a6199c89050b4d08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.365466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a02a41e866a24638a6199c89050b4d08 [ 810.415329] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Successfully created port: 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.419290] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.813190] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquiring lock "362c5924-06e1-4385-a1f8-6b0556f7ba8b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.814053] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Lock "362c5924-06e1-4385-a1f8-6b0556f7ba8b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.814053] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 3c0a32f119084823a2653c79aaa3eac8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.843470] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c0a32f119084823a2653c79aaa3eac8 [ 810.843470] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 810.848097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg dea5b85fee944fabbb631b6309dc18d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.887920] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.888512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 9861b83a1e92473b81f27ae2ed007ac1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.890809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dea5b85fee944fabbb631b6309dc18d5 [ 810.901063] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9861b83a1e92473b81f27ae2ed007ac1 [ 810.901511] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.902924] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Releasing lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.902924] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 810.902924] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 810.903217] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.904715] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a02d655-5a31-4e6c-bced-201adc79c3f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.908766] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.919658] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 810.922215] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e006f5dc-e9a9-47ad-bdad-2e4f7fe6a266 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.936915] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.937198] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 810.940186] env[61594]: INFO nova.compute.claims [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 
tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.943605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg a704d615beea4e598e05431d389c9a69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.958361] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 810.958587] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 810.958771] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Deleting the datastore file [datastore1] 07d2f1e7-c08e-434c-aea7-941ef75f16ba {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 810.959100] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3e21f73-bae6-4925-828d-433c98e202d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.963422] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.964669] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 6f7919df5c2a4cd0b1be833fdb362467 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.972899] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for the task: (returnval){ [ 810.972899] env[61594]: value = "task-1291405" [ 810.972899] env[61594]: _type = "Task" [ 810.972899] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.977326] env[61594]: ERROR nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. 
[ 810.977326] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.977326] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.977326] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.977326] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.977326] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.977326] env[61594]: ERROR nova.compute.manager raise self.value [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.977326] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 810.977326] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.977326] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 810.977913] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.977913] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 810.977913] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. 
[ 810.977913] env[61594]: ERROR nova.compute.manager [ 810.977913] env[61594]: Traceback (most recent call last): [ 810.977913] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 810.977913] env[61594]: listener.cb(fileno) [ 810.977913] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 810.977913] env[61594]: result = function(*args, **kwargs) [ 810.977913] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.977913] env[61594]: return func(*args, **kwargs) [ 810.977913] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 810.977913] env[61594]: raise e [ 810.977913] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.977913] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 810.977913] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.977913] env[61594]: created_port_ids = self._update_ports_for_instance( [ 810.977913] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.977913] env[61594]: with excutils.save_and_reraise_exception(): [ 810.977913] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.977913] env[61594]: self.force_reraise() [ 810.977913] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.977913] env[61594]: raise self.value [ 810.977913] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.977913] env[61594]: updated_port = self._update_port( [ 810.977913] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.977913] env[61594]: _ensure_no_port_binding_failure(port) [ 810.977913] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.977913] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 810.978845] env[61594]: nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. [ 810.978845] env[61594]: Removing descriptor: 20 [ 810.978845] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f7919df5c2a4cd0b1be833fdb362467 [ 810.978969] env[61594]: ERROR nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. 
[ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Traceback (most recent call last): [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] yield resources [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.driver.spawn(context, instance, image_meta, [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] vm_ref = self.build_virtual_machine(instance, [ 810.978969] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] for vif in network_info: [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self._sync_wrapper(fn, *args, **kwargs) [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.wait() [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self[:] = self._gt.wait() [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self._exit_event.wait() [ 810.979350] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 810.979350] env[61594]: ERROR 
nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] result = hub.switch() [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self.greenlet.switch() [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] result = function(*args, **kwargs) [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return func(*args, **kwargs) [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise e [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] nwinfo = self.network_api.allocate_for_instance( [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] created_port_ids = self._update_ports_for_instance( [ 810.979769] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] with excutils.save_and_reraise_exception(): [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.force_reraise() [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise self.value [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] updated_port = self._update_port( [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.980268] 
env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] _ensure_no_port_binding_failure(port) [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise exception.PortBindingFailed(port_id=port['id']) [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. [ 810.980268] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] [ 810.980704] env[61594]: INFO nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Terminating instance [ 810.980961] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Releasing lock "refresh_cache-93ecfc04-23c5-41fc-babf-1a5bd02769b8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.981180] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 810.981354] env[61594]: DEBUG nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 810.981514] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 810.983571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.983618] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.983740] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.984207] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 17551119279844439ade1dc555d1c330 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.988970] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. 
[ 810.988970] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.988970] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.988970] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.988970] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.988970] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.988970] env[61594]: ERROR nova.compute.manager raise self.value [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.988970] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 810.988970] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.988970] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 810.989598] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.989598] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 810.989598] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. 
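The tracebacks above bottom out in _ensure_no_port_binding_failure() raising nova.exception.PortBindingFailed. Below is a minimal standalone sketch of that kind of guard; it is not the actual nova/network/neutron.py code, and the 'binding:vif_type' key plus the 'binding_failed' sentinel are assumptions inferred from the Neutron port-binding API.

```python
# Illustrative sketch only: approximates the guard seen in the tracebacks above.
# Assumption: a bound port carries a 'binding:vif_type' attribute, and a failed
# binding is reported with the sentinel value 'binding_failed'.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    """Raise if the port's binding is reported as failed."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port whose binding failed on the Neutron side.
port = {'id': 'd3a45c31-7ab7-489a-aac0-8e6c223de0e7',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```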
[ 810.989598] env[61594]: ERROR nova.compute.manager [ 810.989598] env[61594]: Traceback (most recent call last): [ 810.989598] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 810.989598] env[61594]: listener.cb(fileno) [ 810.989598] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 810.989598] env[61594]: result = function(*args, **kwargs) [ 810.989598] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.989598] env[61594]: return func(*args, **kwargs) [ 810.989598] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 810.989598] env[61594]: raise e [ 810.989598] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.989598] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 810.989598] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.989598] env[61594]: created_port_ids = self._update_ports_for_instance( [ 810.989598] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.989598] env[61594]: with excutils.save_and_reraise_exception(): [ 810.989598] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.989598] env[61594]: self.force_reraise() [ 810.989598] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.989598] env[61594]: raise self.value [ 810.989598] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.989598] env[61594]: updated_port = self._update_port( [ 810.989598] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.989598] env[61594]: _ensure_no_port_binding_failure(port) [ 810.989598] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.989598] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 810.990484] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. [ 810.990484] env[61594]: Removing descriptor: 17 [ 810.990484] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. 
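The same tracebacks pass through oslo_utils.excutils.save_and_reraise_exception(), which is why the force_reraise() and "raise self.value" frames appear. A small usage sketch of that pattern follows, assuming oslo.utils is installed; bind_port() and release_port() are hypothetical stand-ins, not Nova helpers.

```python
# Minimal sketch of the oslo.utils pattern visible in the frames above:
# run cleanup when an exception occurs, then re-raise the original error.
from oslo_utils import excutils


def bind_port(port_id):
    # Stand-in for a call that can fail (e.g. the Neutron port update above).
    raise RuntimeError(f"binding failed for {port_id}")


def release_port(port_id):
    # Stand-in cleanup; in Nova this would undo partial allocations.
    print(f"released {port_id}")


def update_ports(port_ids):
    for port_id in port_ids:
        try:
            bind_port(port_id)
        except Exception:
            with excutils.save_and_reraise_exception():
                # Cleanup runs while the original exception is held; when the
                # block exits, it is re-raised unchanged, hence the
                # force_reraise() / "raise self.value" frames above.
                release_port(port_id)


try:
    update_ports(['99a82fe7-f10a-4919-91d9-fdde7ed0a044'])
except RuntimeError as exc:
    print(exc)
```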
[ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Traceback (most recent call last): [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] yield resources [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.driver.spawn(context, instance, image_meta, [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.990484] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] vm_ref = self.build_virtual_machine(instance, [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] for vif in network_info: [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self._sync_wrapper(fn, *args, **kwargs) [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.wait() [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self[:] = self._gt.wait() [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self._exit_event.wait() [ 810.990922] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 810.991350] env[61594]: ERROR 
nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] result = hub.switch() [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self.greenlet.switch() [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] result = function(*args, **kwargs) [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return func(*args, **kwargs) [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise e [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] nwinfo = self.network_api.allocate_for_instance( [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.991350] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] created_port_ids = self._update_ports_for_instance( [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] with excutils.save_and_reraise_exception(): [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.force_reraise() [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise self.value [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] updated_port = self._update_port( [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.991768] 
env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] _ensure_no_port_binding_failure(port) [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 810.991768] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise exception.PortBindingFailed(port_id=port['id']) [ 810.993295] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. [ 810.993295] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] [ 810.993295] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Terminating instance [ 810.995847] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Task: {'id': task-1291405, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.995847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a704d615beea4e598e05431d389c9a69 [ 810.996015] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.996122] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.996282] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.996691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9dea1e0c414c4058b7bd3939623f4c51 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.998687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg fdeda86d908e493f8b405ad49441ffbc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 810.999705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17551119279844439ade1dc555d1c330 [ 811.005522] env[61594]: INFO oslo_messaging._drivers.amqpdriver 
[-] Received RPC response for msg 9dea1e0c414c4058b7bd3939623f4c51 [ 811.009725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdeda86d908e493f8b405ad49441ffbc [ 811.052781] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.053519] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 66580cef92d744aa91195910588dbcd8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.068713] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66580cef92d744aa91195910588dbcd8 [ 811.069268] env[61594]: DEBUG nova.network.neutron [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.069740] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 96014afcb05941cf9d8f50f809df4939 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.082845] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.088590] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.090593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96014afcb05941cf9d8f50f809df4939 [ 811.091194] env[61594]: INFO nova.compute.manager [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] [instance: 93ecfc04-23c5-41fc-babf-1a5bd02769b8] Took 0.11 seconds to deallocate network for instance. 
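The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" lines throughout this log come from oslo.concurrency's lockutils wrappers. A minimal sketch of the pattern is below, assuming oslo.concurrency is installed; the lock name and the body are illustrative only, not Nova's.

```python
# Sketch of the oslo.concurrency locking that produces the acquire/release
# timing messages seen above.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized('refresh_cache-demo')
def refresh_instance_cache():
    # Critical section: one caller per lock name at a time; the wrapper logs
    # how long it waited for the lock and how long it was held.
    time.sleep(0.1)
    return []


# The same thing can be written inline with the context-manager form:
with lockutils.lock('compute_resources-demo'):
    refresh_instance_cache()
```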
[ 811.095128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg 409d5fc9cdda4bd78ded9c112863440b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.136981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 409d5fc9cdda4bd78ded9c112863440b [ 811.139450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg dcd18eb3a0ac42d489b42a2c6302d387 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.178223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcd18eb3a0ac42d489b42a2c6302d387 [ 811.219029] env[61594]: INFO nova.scheduler.client.report [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Deleted allocations for instance 93ecfc04-23c5-41fc-babf-1a5bd02769b8 [ 811.226369] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Expecting reply to msg a8685452254e45efbf59157ec1810007 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.243055] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f1a5e6-7c68-4c0b-a5f7-c7cd1ae4eef4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.244567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8685452254e45efbf59157ec1810007 [ 811.245157] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2b31308d-4f3c-47db-90b3-3b2f0b221688 tempest-ServersV294TestFqdnHostnames-1183918085 tempest-ServersV294TestFqdnHostnames-1183918085-project-member] Lock "93ecfc04-23c5-41fc-babf-1a5bd02769b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.902s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.253018] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b880b20-aaeb-444d-b35a-c23cda16c770 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.292706] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50aeadc8-ffa2-48af-91d3-4f980537c56b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.300916] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef7dcfb-7a0b-4f94-910f-f3b99fbcfff4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.314937] env[61594]: DEBUG nova.compute.provider_tree [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Inventory has not changed in ProviderTree for provider: 
f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.315469] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg bb9e2fa9f9694da9ad54806b098c93f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.325050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb9e2fa9f9694da9ad54806b098c93f5 [ 811.329442] env[61594]: DEBUG nova.scheduler.client.report [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 811.329442] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 13c716e7a0b44074861572c832235c1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.348240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 13c716e7a0b44074861572c832235c1d [ 811.349088] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.412s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.349582] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 811.351635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 4db35667cd0c41ac8326b4eb1b3ec086 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.406648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4db35667cd0c41ac8326b4eb1b3ec086 [ 811.408326] env[61594]: DEBUG nova.compute.utils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 811.409223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 6cbcd02336d64072b5c74faca31d50f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.411787] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 811.411978] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 811.420179] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6cbcd02336d64072b5c74faca31d50f1 [ 811.420853] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 811.422667] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 4e8af03df50843e5b6e2ae453616f2e7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.459679] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e8af03df50843e5b6e2ae453616f2e7 [ 811.463276] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 5dbdd31883b04214b18b62add079140b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.468722] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.469197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 0b0c3257ee604e6b9af55ea8eefef9bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.478092] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b0c3257ee604e6b9af55ea8eefef9bb [ 811.478678] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.479095] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 811.479455] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.480438] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0d91eb3d-37e6-4476-a255-8dbe5465263f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.486638] env[61594]: DEBUG oslo_vmware.api [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Task: {'id': task-1291405, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041441} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.487704] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.488151] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2a171778760d4c88b1930a85c00b338b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.489319] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.489534] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 811.490060] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.490060] env[61594]: INFO nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Took 0.59 seconds to destroy the instance on the hypervisor. [ 811.490243] env[61594]: DEBUG oslo.service.loopingcall [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.490718] env[61594]: DEBUG nova.compute.manager [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 811.492831] env[61594]: DEBUG nova.compute.claims [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 811.493010] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.493267] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.495174] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 02bd023c2f854784b090d42a79169ed0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.499664] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86fc91c-6f17-4d45-86f9-da4d73bfecec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.511159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a171778760d4c88b1930a85c00b338b [ 811.511814] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.512201] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 811.512389] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 811.513265] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af84b48b-84f3-4154-866c-e6aa71200ed9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.518271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5dbdd31883b04214b18b62add079140b [ 811.518498] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 811.527152] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e600280c-2414-420d-bc8d-6e3e7979fccf could not be found. [ 811.527373] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.528609] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Took 0.05 seconds to destroy the instance on the hypervisor. [ 811.528609] env[61594]: DEBUG oslo.service.loopingcall [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.529967] env[61594]: DEBUG nova.compute.manager [-] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 811.529967] env[61594]: DEBUG nova.network.neutron [-] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 811.534605] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685a7ad8-9243-4197-954a-fa94156285a5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.548422] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02bd023c2f854784b090d42a79169ed0 [ 811.549245] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.560042] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 811.560301] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 811.560492] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.560685] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 811.560831] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 811.560974] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 811.561192] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 811.561360] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 811.561519] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 811.561701] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 811.561889] env[61594]: DEBUG nova.virt.hardware [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 811.563065] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847cbd07-9610-454c-8c74-3a456bf6d6ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.571182] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3 could not be found. [ 811.571409] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 811.571592] env[61594]: INFO nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Took 0.06 seconds to destroy the instance on the hypervisor. 
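The nova.virt.hardware DEBUG lines above walk from the flavor/image limits down to a single 1:1:1 topology for one vCPU. The sketch below is a simplified stand-in for that enumeration (not the real _get_possible_cpu_topologies()): it lists every sockets/cores/threads split whose product equals the vCPU count and respects the maxima.

```python
# Simplified illustration of the topology enumeration summarised above.
from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // sockets // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# Matches the log: 1 vCPU yields exactly one topology, 1 socket x 1 core x 1 thread.
print(possible_cpu_topologies(1))
```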
[ 811.571835] env[61594]: DEBUG oslo.service.loopingcall [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.575603] env[61594]: DEBUG nova.policy [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0999e1e5803d4d40bfc1145b45da8085', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18ca4e49b7d7415783135687d8109808', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 811.577626] env[61594]: DEBUG nova.compute.manager [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 811.577725] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 811.582815] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f6ebc4-ee8c-4e1f-baed-e3f08ad0b71d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.589649] env[61594]: DEBUG nova.network.neutron [-] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.590133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2ab316cb856b4509947bddf33be21e1c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.601853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ab316cb856b4509947bddf33be21e1c [ 811.601853] env[61594]: DEBUG nova.network.neutron [-] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.602101] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d0120bf2f9a44a70892d53b63f45382c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.610535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0120bf2f9a44a70892d53b63f45382c [ 811.610963] env[61594]: INFO nova.compute.manager [-] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Took 0.08 seconds to deallocate network for instance. 
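The nova.policy DEBUG line above shows a token carrying only the member and reader roles failing the network:attach_external_network check. Nova evaluates this through oslo.policy; the standalone sketch below only mirrors the outcome with a hypothetical check_admin_required() helper and is not the real policy engine or rule definition.

```python
# Standalone sketch of the policy decision logged above: a non-admin token is
# refused network:attach_external_network, so the request proceeds without it.

def check_admin_required(credentials):
    """Hypothetical stand-in for an admin-only policy rule."""
    return credentials.get('is_admin', False) or 'admin' in credentials.get('roles', [])


credentials = {
    'is_admin': False,
    'roles': ['member', 'reader'],
    'project_id': '18ca4e49b7d7415783135687d8109808',
}

if not check_admin_required(credentials):
    print("Policy check for network:attach_external_network failed")
```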
[ 811.612850] env[61594]: DEBUG nova.compute.claims [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 811.613226] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.620661] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.621104] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c00e4781c80d4390b97877d18fac01b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.633162] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c00e4781c80d4390b97877d18fac01b1 [ 811.633810] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.634190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4bdac743f2e94cb19ef852ed78cf28e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.645996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bdac743f2e94cb19ef852ed78cf28e9 [ 811.646485] env[61594]: INFO nova.compute.manager [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Took 0.07 seconds to deallocate network for instance. 
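The placement inventory payloads reported for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be (above and below) can be read as schedulable capacity using the usual placement formula, capacity = (total - reserved) * allocation_ratio. A small sketch with the figures copied from this log:

```python
# Turn the inventory data logged for the provider into schedulable capacity.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{resource_class}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```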
[ 811.648503] env[61594]: DEBUG nova.compute.claims [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 811.648712] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 811.711951] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4b0453-fda2-4dca-8d7b-5f27c195c695 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.720543] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac903dc-6f54-4549-ba1d-f836c1be4923 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.751815] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a600476b-d510-453e-8d8c-d0162b7fcbe0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.760083] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49c56ce-2a2b-4143-b890-e26558579e31 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.773997] env[61594]: DEBUG nova.compute.provider_tree [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 811.774535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 9ec5e96c803e4b5dbb7291885eb27d41 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.783526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ec5e96c803e4b5dbb7291885eb27d41 [ 811.784502] env[61594]: DEBUG nova.scheduler.client.report [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 811.786809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 
tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 635f916d68394d67ac8256703b9cad86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.797910] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 635f916d68394d67ac8256703b9cad86 [ 811.798731] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.305s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.799290] env[61594]: ERROR nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.799290] env[61594]: Faults: ['InvalidArgument'] [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Traceback (most recent call last): [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self.driver.spawn(context, instance, image_meta, [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self._fetch_image_if_missing(context, vi) [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] image_cache(vi, tmp_image_ds_loc) [ 811.799290] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] vm_util.copy_virtual_disk( [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] session._wait_for_task(vmdk_copy_task) [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 
07d2f1e7-c08e-434c-aea7-941ef75f16ba] return self.wait_for_task(task_ref) [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] return evt.wait() [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] result = hub.switch() [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] return self.greenlet.switch() [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 811.799709] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] self.f(*self.args, **self.kw) [ 811.800129] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 811.800129] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] raise exceptions.translate_fault(task_info.error) [ 811.800129] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 811.800129] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Faults: ['InvalidArgument'] [ 811.800129] env[61594]: ERROR nova.compute.manager [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] [ 811.800129] env[61594]: DEBUG nova.compute.utils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 811.801246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.188s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.803070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 7287eab9226d4e5d9927283b75b31272 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.804375] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Build of instance 
07d2f1e7-c08e-434c-aea7-941ef75f16ba was re-scheduled: A specified parameter was not correct: fileType [ 811.804375] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 811.805204] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 811.805204] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.805339] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.805415] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.805781] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg c31dcf94f59b4b56ba993528062db4fd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 811.812300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c31dcf94f59b4b56ba993528062db4fd [ 811.840075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7287eab9226d4e5d9927283b75b31272 [ 811.848593] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.997105] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4698eeb-d8b5-40da-9db4-48e1f5ebab6c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.007477] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ce746e-6221-49f5-9b3a-d2d75c60a7bc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.036446] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c4d386-5035-4522-891f-db91bd23426e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.045288] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0913bc-c3ed-4200-9bcb-b937237c135c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.060497] env[61594]: DEBUG nova.compute.provider_tree [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.061022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 602c10585628437eb856700fff6bd5fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.068814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 602c10585628437eb856700fff6bd5fb [ 812.069771] env[61594]: DEBUG nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 812.072129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg d769a85a700f458e8dc19da738caae7a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.095701] env[61594]: DEBUG nova.network.neutron [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.096280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 36413a1a3a034b9a881932fb00ebfe1d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.100444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d769a85a700f458e8dc19da738caae7a [ 812.100444] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.298s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.100444] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Traceback (most recent call last): [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.driver.spawn(context, instance, image_meta, [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.100444] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] vm_ref = self.build_virtual_machine(instance, [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] for vif in network_info: [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self._sync_wrapper(fn, *args, **kwargs) [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 812.100933] env[61594]: ERROR 
nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.wait() [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self[:] = self._gt.wait() [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 812.100933] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self._exit_event.wait() [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] result = hub.switch() [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return self.greenlet.switch() [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] result = function(*args, **kwargs) [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] return func(*args, **kwargs) [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise e [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] nwinfo = self.network_api.allocate_for_instance( [ 812.101296] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] created_port_ids = self._update_ports_for_instance( [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] with excutils.save_and_reraise_exception(): [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.101682] env[61594]: ERROR 
nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] self.force_reraise() [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise self.value [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] updated_port = self._update_port( [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] _ensure_no_port_binding_failure(port) [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.101682] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] raise exception.PortBindingFailed(port_id=port['id']) [ 812.102097] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] nova.exception.PortBindingFailed: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. [ 812.102097] env[61594]: ERROR nova.compute.manager [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] [ 812.102097] env[61594]: DEBUG nova.compute.utils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.102097] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.453s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.103469] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 88756256666743ab81e008ea294d3ed4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.104593] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Build of instance e600280c-2414-420d-bc8d-6e3e7979fccf was re-scheduled: Binding failed for port 99a82fe7-f10a-4919-91d9-fdde7ed0a044, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 812.105029] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 812.105246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.105389] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.105542] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.105887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 95dbc4ad11dc445683d30f042d42a2da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.108073] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36413a1a3a034b9a881932fb00ebfe1d [ 812.109028] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Releasing lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.109028] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 812.109028] env[61594]: DEBUG nova.compute.manager [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 812.110434] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 8df98fdd4fe5477eae2e822e17a5fb6f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.113578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95dbc4ad11dc445683d30f042d42a2da [ 812.121401] env[61594]: ERROR nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. [ 812.121401] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.121401] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.121401] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.121401] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.121401] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.121401] env[61594]: ERROR nova.compute.manager raise self.value [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.121401] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 812.121401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.121401] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 812.121895] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.121895] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 812.121895] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. 
[ 812.121895] env[61594]: ERROR nova.compute.manager [ 812.121895] env[61594]: Traceback (most recent call last): [ 812.121895] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 812.121895] env[61594]: listener.cb(fileno) [ 812.121895] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.121895] env[61594]: result = function(*args, **kwargs) [ 812.121895] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.121895] env[61594]: return func(*args, **kwargs) [ 812.121895] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.121895] env[61594]: raise e [ 812.121895] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.121895] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 812.121895] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.121895] env[61594]: created_port_ids = self._update_ports_for_instance( [ 812.121895] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.121895] env[61594]: with excutils.save_and_reraise_exception(): [ 812.121895] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.121895] env[61594]: self.force_reraise() [ 812.121895] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.121895] env[61594]: raise self.value [ 812.121895] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.121895] env[61594]: updated_port = self._update_port( [ 812.121895] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.121895] env[61594]: _ensure_no_port_binding_failure(port) [ 812.121895] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.121895] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 812.122736] env[61594]: nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. [ 812.122736] env[61594]: Removing descriptor: 25 [ 812.122736] env[61594]: ERROR nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. 
[ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Traceback (most recent call last): [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] yield resources [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.driver.spawn(context, instance, image_meta, [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.122736] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] vm_ref = self.build_virtual_machine(instance, [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] for vif in network_info: [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self._sync_wrapper(fn, *args, **kwargs) [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.wait() [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self[:] = self._gt.wait() [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self._exit_event.wait() [ 812.123353] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.123862] env[61594]: ERROR 
nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] result = hub.switch() [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self.greenlet.switch() [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] result = function(*args, **kwargs) [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return func(*args, **kwargs) [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise e [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] nwinfo = self.network_api.allocate_for_instance( [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.123862] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] created_port_ids = self._update_ports_for_instance( [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] with excutils.save_and_reraise_exception(): [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.force_reraise() [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise self.value [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] updated_port = self._update_port( [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.124269] 
env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] _ensure_no_port_binding_failure(port) [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.124269] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise exception.PortBindingFailed(port_id=port['id']) [ 812.124647] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. [ 812.124647] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] [ 812.124647] env[61594]: INFO nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Terminating instance [ 812.125483] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquiring lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.125645] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquired lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.125815] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.126242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 720f75345a9644888aea6dee49ddab04 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.140880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 720f75345a9644888aea6dee49ddab04 [ 812.154547] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.167396] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88756256666743ab81e008ea294d3ed4 [ 812.187382] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8df98fdd4fe5477eae2e822e17a5fb6f [ 812.190108] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 20d7055760e843499d379ebf7c82b1fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.192670] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.225714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20d7055760e843499d379ebf7c82b1fc [ 812.256015] env[61594]: INFO nova.scheduler.client.report [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Deleted allocations for instance 07d2f1e7-c08e-434c-aea7-941ef75f16ba [ 812.262520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 703b192a3ca44c858842cc9cb6c22f68 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.290088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 703b192a3ca44c858842cc9cb6c22f68 [ 812.290805] env[61594]: DEBUG oslo_concurrency.lockutils [None req-85685b95-ee2c-4253-83c5-dec83d6d5968 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.288s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.291386] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 44.604s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.291386] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.291816] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.291816] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.294441] env[61594]: INFO nova.compute.manager [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Terminating instance [ 812.302705] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquiring lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.302815] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Acquired lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.303151] env[61594]: DEBUG nova.network.neutron [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.303668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 5bee59ecce2744208b68c552cf891b63 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.324543] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bee59ecce2744208b68c552cf891b63 [ 812.350598] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a345b54-2290-4639-9ff0-9e2edf9a3a3a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.359809] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d912525a-b7ac-4220-8f58-e6a0f2e01284 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.398978] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af5260b-53da-4704-8b79-ca8db9e98a06 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.411992] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc74e31-b1e5-4cc9-a146-6514cdad7c76 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.432678] env[61594]: DEBUG nova.compute.provider_tree [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.433681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 9bdfbe7571014bf9a9c91c6720e87f1c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.445764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bdfbe7571014bf9a9c91c6720e87f1c [ 812.447214] env[61594]: DEBUG nova.scheduler.client.report [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 812.450270] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ee442309703b48aea874baee46be9e11 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.458025] env[61594]: DEBUG nova.network.neutron [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.465407] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee442309703b48aea874baee46be9e11 [ 812.466285] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.365s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 812.467088] env[61594]: ERROR nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. 
[ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Traceback (most recent call last): [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.driver.spawn(context, instance, image_meta, [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] vm_ref = self.build_virtual_machine(instance, [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.467088] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] for vif in network_info: [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self._sync_wrapper(fn, *args, **kwargs) [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.wait() [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self[:] = self._gt.wait() [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self._exit_event.wait() [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] result = hub.switch() [ 812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
812.467457] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return self.greenlet.switch() [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] result = function(*args, **kwargs) [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] return func(*args, **kwargs) [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise e [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] nwinfo = self.network_api.allocate_for_instance( [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] created_port_ids = self._update_ports_for_instance( [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] with excutils.save_and_reraise_exception(): [ 812.467807] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] self.force_reraise() [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise self.value [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] updated_port = self._update_port( [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] _ensure_no_port_binding_failure(port) [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] raise exception.PortBindingFailed(port_id=port['id']) [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] nova.exception.PortBindingFailed: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. [ 812.468192] env[61594]: ERROR nova.compute.manager [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] [ 812.468574] env[61594]: DEBUG nova.compute.utils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 812.470185] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Build of instance 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3 was re-scheduled: Binding failed for port d3a45c31-7ab7-489a-aac0-8e6c223de0e7, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 812.470716] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 812.470982] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.471185] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.471383] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.471853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 26ce7a952ef24a47b46032d0594a8ba7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.482730] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Instance failed network setup after 1 attempt(s): 
nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. [ 812.482730] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.482730] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.482730] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.482730] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.482730] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.482730] env[61594]: ERROR nova.compute.manager raise self.value [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.482730] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 812.482730] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.482730] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 812.483236] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.483236] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 812.483236] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. 
[ 812.483236] env[61594]: ERROR nova.compute.manager [ 812.483236] env[61594]: Traceback (most recent call last): [ 812.483236] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 812.483236] env[61594]: listener.cb(fileno) [ 812.483236] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.483236] env[61594]: result = function(*args, **kwargs) [ 812.483236] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.483236] env[61594]: return func(*args, **kwargs) [ 812.483236] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.483236] env[61594]: raise e [ 812.483236] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.483236] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 812.483236] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.483236] env[61594]: created_port_ids = self._update_ports_for_instance( [ 812.483236] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.483236] env[61594]: with excutils.save_and_reraise_exception(): [ 812.483236] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.483236] env[61594]: self.force_reraise() [ 812.483236] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.483236] env[61594]: raise self.value [ 812.483236] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.483236] env[61594]: updated_port = self._update_port( [ 812.483236] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.483236] env[61594]: _ensure_no_port_binding_failure(port) [ 812.483236] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.483236] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 812.484125] env[61594]: nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. [ 812.484125] env[61594]: Removing descriptor: 23 [ 812.484125] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. 
[ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Traceback (most recent call last): [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] yield resources [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.driver.spawn(context, instance, image_meta, [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 812.484125] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] vm_ref = self.build_virtual_machine(instance, [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] vif_infos = vmwarevif.get_vif_info(self._session, [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] for vif in network_info: [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self._sync_wrapper(fn, *args, **kwargs) [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.wait() [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self[:] = self._gt.wait() [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self._exit_event.wait() [ 812.484515] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 812.484942] env[61594]: ERROR 
nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] result = hub.switch() [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self.greenlet.switch() [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] result = function(*args, **kwargs) [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return func(*args, **kwargs) [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise e [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] nwinfo = self.network_api.allocate_for_instance( [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 812.484942] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] created_port_ids = self._update_ports_for_instance( [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] with excutils.save_and_reraise_exception(): [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.force_reraise() [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise self.value [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] updated_port = self._update_port( [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 812.485397] 
env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] _ensure_no_port_binding_failure(port) [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 812.485397] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise exception.PortBindingFailed(port_id=port['id']) [ 812.485868] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. [ 812.485868] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] [ 812.485868] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Terminating instance [ 812.486605] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 812.486689] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.486860] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 812.487545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 5750656c20ee4aecb941011751291c13 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.488656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26ce7a952ef24a47b46032d0594a8ba7 [ 812.498560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5750656c20ee4aecb941011751291c13 [ 812.582445] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.620507] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.705810] env[61594]: DEBUG nova.network.neutron [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.706550] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 015ffabde2094f7eaffea3898089c432 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.717240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 015ffabde2094f7eaffea3898089c432 [ 812.717862] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Releasing lock "refresh_cache-07d2f1e7-c08e-434c-aea7-941ef75f16ba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.718264] env[61594]: DEBUG nova.compute.manager [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 812.718485] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.719019] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bff3ccc8-6dc7-4bc7-b536-9601df3880cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.730508] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8589c0b-db6a-48d4-baad-c1823277877e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.756193] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 07d2f1e7-c08e-434c-aea7-941ef75f16ba could not be found. 
[ 812.756430] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.756617] env[61594]: INFO nova.compute.manager [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Took 0.04 seconds to destroy the instance on the hypervisor. [ 812.756864] env[61594]: DEBUG oslo.service.loopingcall [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.757084] env[61594]: DEBUG nova.compute.manager [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 812.757178] env[61594]: DEBUG nova.network.neutron [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.786142] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.786659] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9ac937833191412d96e629422becaa64 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.797475] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ac937833191412d96e629422becaa64 [ 812.798123] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-e600280c-2414-420d-bc8d-6e3e7979fccf" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.798538] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 812.798589] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 812.798745] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.801161] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.801663] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg e60ae187a24c4838af82579256f54ded in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.809942] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e60ae187a24c4838af82579256f54ded [ 812.810574] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Releasing lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.810845] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 812.811049] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 812.811562] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ec3a4f78-a1cb-445d-a3f8-ab0f9269c026 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.815145] env[61594]: DEBUG nova.network.neutron [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.815633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cec256b3a8604db8bb2d10f0e9c6b844 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.822164] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ab14ea-cdd6-4ed2-8214-1af10c8a1d12 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.832583] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cec256b3a8604db8bb2d10f0e9c6b844 [ 812.832990] env[61594]: DEBUG nova.network.neutron [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.833398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg af0a96b999284b04bcd09dc05035938e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.847639] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 500259b0-a57d-43ff-9c88-46190b6a3a10 could not be found. [ 812.847835] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 812.848017] env[61594]: INFO nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Took 0.04 seconds to destroy the instance on the hypervisor. [ 812.848283] env[61594]: DEBUG oslo.service.loopingcall [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 812.848777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af0a96b999284b04bcd09dc05035938e [ 812.849149] env[61594]: DEBUG nova.compute.manager [-] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 812.849226] env[61594]: DEBUG nova.network.neutron [-] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.855081] env[61594]: INFO nova.compute.manager [-] [instance: 07d2f1e7-c08e-434c-aea7-941ef75f16ba] Took 0.10 seconds to deallocate network for instance. 
[ 812.855936] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.856496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9712ece8f84b436680101a8aeb72ae6f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.862708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg cf39bc21af5d4d5fb0eaa2f4530ecf8a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.868233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9712ece8f84b436680101a8aeb72ae6f [ 812.868233] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.868907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 94e35252d03d4706ba7d6ca008c4d8e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.879195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94e35252d03d4706ba7d6ca008c4d8e0 [ 812.879776] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: e600280c-2414-420d-bc8d-6e3e7979fccf] Took 0.08 seconds to deallocate network for instance. [ 812.881481] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg f0b487447aaa42c9bb0323670ab89b9d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.918371] env[61594]: DEBUG nova.network.neutron [-] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.918371] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e824e505b24142ada0a4922211c7c49d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.924510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf39bc21af5d4d5fb0eaa2f4530ecf8a [ 812.928215] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e824e505b24142ada0a4922211c7c49d [ 812.929643] env[61594]: DEBUG nova.network.neutron [-] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.929643] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8029887c0d8c4e9bafa64edb47ca8f07 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.945181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg 2a6f9ab0b033443d989c3a3600940c17 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.947981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0b487447aaa42c9bb0323670ab89b9d [ 812.948421] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8029887c0d8c4e9bafa64edb47ca8f07 [ 812.952137] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 07b7fa272c8346658ba8cdf4d39fbfe9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.953134] env[61594]: INFO nova.compute.manager [-] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Took 0.10 seconds to deallocate network for instance. 
[ 812.956359] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Successfully created port: b183ba56-040d-4b47-8d9f-2483248eb675 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 812.961787] env[61594]: DEBUG nova.compute.claims [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 812.962027] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.962283] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 812.964127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 44c7785456984c289d4686a24fbdeffc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 812.992042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07b7fa272c8346658ba8cdf4d39fbfe9 [ 813.028329] env[61594]: INFO nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Deleted allocations for instance e600280c-2414-420d-bc8d-6e3e7979fccf [ 813.034522] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 175a735ad4da4c7d881cb45ee76eb009 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.052035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44c7785456984c289d4686a24fbdeffc [ 813.064923] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a6f9ab0b033443d989c3a3600940c17 [ 813.069241] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 tempest-ServersListShow296Test-863923347-project-member] Lock "07d2f1e7-c08e-434c-aea7-941ef75f16ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.778s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.070210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b2985c4c-90f3-4dfc-bf29-511d8daf24c1 tempest-ServersListShow296Test-863923347 
tempest-ServersListShow296Test-863923347-project-member] Expecting reply to msg bd0a3b9904a149cabe7ab7666ca283bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.072798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 175a735ad4da4c7d881cb45ee76eb009 [ 813.073271] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "e600280c-2414-420d-bc8d-6e3e7979fccf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.742s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.093648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd0a3b9904a149cabe7ab7666ca283bd [ 813.180290] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e2a3f7-ec5a-4b30-a0ca-366a143252ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.189643] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b135c6f3-ea92-477c-8673-9ad1cd8cc762 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.197018] env[61594]: ERROR nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. 
[ 813.197018] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 813.197018] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.197018] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.197018] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.197018] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.197018] env[61594]: ERROR nova.compute.manager raise self.value [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.197018] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 813.197018] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.197018] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 813.197575] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.197575] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 813.197575] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. 
[ 813.197575] env[61594]: ERROR nova.compute.manager [ 813.197575] env[61594]: Traceback (most recent call last): [ 813.197575] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 813.197575] env[61594]: listener.cb(fileno) [ 813.197575] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 813.197575] env[61594]: result = function(*args, **kwargs) [ 813.197575] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.197575] env[61594]: return func(*args, **kwargs) [ 813.197575] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 813.197575] env[61594]: raise e [ 813.197575] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 813.197575] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 813.197575] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.197575] env[61594]: created_port_ids = self._update_ports_for_instance( [ 813.197575] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.197575] env[61594]: with excutils.save_and_reraise_exception(): [ 813.197575] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.197575] env[61594]: self.force_reraise() [ 813.197575] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.197575] env[61594]: raise self.value [ 813.197575] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.197575] env[61594]: updated_port = self._update_port( [ 813.197575] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.197575] env[61594]: _ensure_no_port_binding_failure(port) [ 813.197575] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.197575] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 813.198417] env[61594]: nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. [ 813.198417] env[61594]: Removing descriptor: 21 [ 813.198417] env[61594]: ERROR nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. 
[ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Traceback (most recent call last): [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] yield resources [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.driver.spawn(context, instance, image_meta, [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.198417] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] vm_ref = self.build_virtual_machine(instance, [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] for vif in network_info: [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self._sync_wrapper(fn, *args, **kwargs) [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.wait() [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self[:] = self._gt.wait() [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self._exit_event.wait() [ 813.198864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 813.199330] env[61594]: ERROR 
nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] result = hub.switch() [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self.greenlet.switch() [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] result = function(*args, **kwargs) [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return func(*args, **kwargs) [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise e [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] nwinfo = self.network_api.allocate_for_instance( [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.199330] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] created_port_ids = self._update_ports_for_instance( [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] with excutils.save_and_reraise_exception(): [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.force_reraise() [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise self.value [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] updated_port = self._update_port( [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.199701] 
env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] _ensure_no_port_binding_failure(port) [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 813.199701] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise exception.PortBindingFailed(port_id=port['id']) [ 813.200060] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. [ 813.200060] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] [ 813.200060] env[61594]: INFO nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Terminating instance [ 813.202785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.202785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquired lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.202785] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.202785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 108ccc3120bd4a728e8ab7f72a6dafc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.233502] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 108ccc3120bd4a728e8ab7f72a6dafc4 [ 813.236492] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d68ada2-6f4e-4855-8647-cd7091bf5a08 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.245347] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96b50c3c-0e4a-4e63-848c-1c79a3adbc3f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.259308] env[61594]: DEBUG nova.compute.provider_tree [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.259822] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 2218c4869737412d9a73a287144ee383 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.272257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2218c4869737412d9a73a287144ee383 [ 813.272257] env[61594]: DEBUG nova.scheduler.client.report [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 813.277022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg ec7953b62f1647e5aa627a253ae0956a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.283559] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.286711] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec7953b62f1647e5aa627a253ae0956a [ 813.287755] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.325s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.291294] env[61594]: ERROR nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. 
[ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Traceback (most recent call last): [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.driver.spawn(context, instance, image_meta, [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self._vmops.spawn(context, instance, image_meta, injected_files, [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] vm_ref = self.build_virtual_machine(instance, [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] vif_infos = vmwarevif.get_vif_info(self._session, [ 813.291294] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] for vif in network_info: [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self._sync_wrapper(fn, *args, **kwargs) [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.wait() [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self[:] = self._gt.wait() [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self._exit_event.wait() [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] result = hub.switch() [ 813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
813.291841] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return self.greenlet.switch() [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] result = function(*args, **kwargs) [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] return func(*args, **kwargs) [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise e [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] nwinfo = self.network_api.allocate_for_instance( [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] created_port_ids = self._update_ports_for_instance( [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] with excutils.save_and_reraise_exception(): [ 813.292484] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] self.force_reraise() [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise self.value [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] updated_port = self._update_port( [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] _ensure_no_port_binding_failure(port) [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] raise exception.PortBindingFailed(port_id=port['id']) [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] nova.exception.PortBindingFailed: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. [ 813.293020] env[61594]: ERROR nova.compute.manager [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] [ 813.293322] env[61594]: DEBUG nova.compute.utils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 813.293322] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Build of instance 500259b0-a57d-43ff-9c88-46190b6a3a10 was re-scheduled: Binding failed for port e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 813.293322] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 813.293322] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquiring lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.293469] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Acquired lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.293469] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.293469] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 86a60033b1bd464399f3d1b5b42f6781 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.304293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86a60033b1bd464399f3d1b5b42f6781 [ 813.462560] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 
tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.462795] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 8fa466965c8e4016b9845cd886c30b83 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.465423] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.465912] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 451a3bcafe1540c0909107711847b665 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.475313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fa466965c8e4016b9845cd886c30b83 [ 813.480270] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.480613] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 813.480837] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 813.481485] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 451a3bcafe1540c0909107711847b665 [ 813.481889] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ab69c723-1437-41bb-9ff3-4d35caff9566 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.485616] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 813.485616] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 813.485616] env[61594]: DEBUG nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 813.485616] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.498513] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6319eb7-e0f7-46c8-a003-c34afcce57b7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.527453] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4ad1a310-5786-4bb6-87ff-72069f692eff could not be found. 
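The PortBindingFailed tracebacks recorded above all bottom out in _ensure_no_port_binding_failure (nova/network/neutron.py). Below is a minimal, self-contained sketch of that check, assuming the usual condition that Neutron returned the port with binding:vif_type set to 'binding_failed'; it is an illustrative reconstruction, not the deployed source.

# Simplified reconstruction of the check behind the PortBindingFailed
# errors in the tracebacks above. Illustrative only.
VIF_TYPE_BINDING_FAILED = 'binding_failed'

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f'Binding failed for port {port_id}, please check neutron '
            'logs for more information.')

def ensure_no_port_binding_failure(port):
    # Neutron reports binding:vif_type == 'binding_failed' when no
    # mechanism driver could bind the port; Nova converts that into an
    # exception so the build is aborted and the instance re-scheduled.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

try:
    ensure_no_port_binding_failure(
        {'id': 'e4b7d217-6d21-4ae5-a1f4-cbf7611ff2be',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)  # same message as the ERROR entries at 813.291294 above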
[ 813.527453] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 813.527453] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Took 0.05 seconds to destroy the instance on the hypervisor. [ 813.527453] env[61594]: DEBUG oslo.service.loopingcall [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 813.527705] env[61594]: DEBUG nova.compute.manager [-] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 813.527705] env[61594]: DEBUG nova.network.neutron [-] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 813.568798] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.582844] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.582844] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 7719195ee26741e6b058b5380cb5b977 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.595133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7719195ee26741e6b058b5380cb5b977 [ 813.595133] env[61594]: DEBUG nova.network.neutron [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.595133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a980479bfcd74fefa81bef0b3ddadbdd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.605745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a980479bfcd74fefa81bef0b3ddadbdd [ 813.605745] env[61594]: INFO nova.compute.manager [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Took 0.12 seconds to deallocate network for instance. [ 813.605745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2e1d521032d74f7496a44dd004d3bbf4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.644617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e1d521032d74f7496a44dd004d3bbf4 [ 813.648888] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 4d89db315e2f47588e8de80f52347d09 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.658253] env[61594]: DEBUG nova.network.neutron [-] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.659351] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3d1b601178ef47c982923503cb8786cb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.669022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d1b601178ef47c982923503cb8786cb [ 813.669022] env[61594]: DEBUG nova.network.neutron [-] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.669022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 939609e6fd444ce382b756aa3d69f3dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.687836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 939609e6fd444ce382b756aa3d69f3dc [ 813.688642] env[61594]: INFO nova.compute.manager [-] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Took 0.16 seconds to deallocate network for instance. [ 813.691819] env[61594]: DEBUG nova.compute.claims [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 813.692021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.692561] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.694781] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e6fa9394a43f4a6c8de6d36aaede2b79 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.697099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4d89db315e2f47588e8de80f52347d09 [ 813.739099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6fa9394a43f4a6c8de6d36aaede2b79 [ 813.744354] env[61594]: INFO nova.scheduler.client.report [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3 [ 813.750975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cef607aa18e6428b9a8651c83e7e46d0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.766505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
cef607aa18e6428b9a8651c83e7e46d0 [ 813.767171] env[61594]: DEBUG oslo_concurrency.lockutils [None req-b4aa60c9-059c-4b71-b748-886528a0ce8f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.734s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.767464] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 9.754s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.767728] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 813.768065] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 813.770499] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.770499] env[61594]: INFO nova.compute.manager [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Terminating instance [ 813.773384] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 813.773384] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.773384] env[61594]: DEBUG nova.network.neutron [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 
tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 813.773601] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cde5f52256ae4f328d30914a330932c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 813.792150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde5f52256ae4f328d30914a330932c3 [ 813.882486] env[61594]: DEBUG nova.network.neutron [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 813.910364] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2615c7-0276-4dbe-867d-9587b25271ab {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.918338] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2579864-1306-49d5-8d0f-616bfb293a4f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.960532] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d7c8de-0dbf-4bd2-a5b4-6f6577298a52 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.968701] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6b95a4-6202-42db-bef4-434bb0fae03c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.990660] env[61594]: DEBUG nova.compute.provider_tree [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.990660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 356e8c96975c493eb7d808f064414278 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.001467] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.001982] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 0cbe020c8c8c48278602ce55daa793b6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.003052] env[61594]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 356e8c96975c493eb7d808f064414278 [ 814.003993] env[61594]: DEBUG nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 814.006688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg d62cfab0b8574d268602ac50957a01d1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.010070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0cbe020c8c8c48278602ce55daa793b6 [ 814.010710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Releasing lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.011310] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 814.011542] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.012052] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-414c6101-95e0-4cff-a859-278553ab97a1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.022203] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d81dfd-622a-4b23-8abf-eb8c92fec76b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.035934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d62cfab0b8574d268602ac50957a01d1 [ 814.037369] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.345s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.040671] env[61594]: ERROR nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. 
[ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Traceback (most recent call last): [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.driver.spawn(context, instance, image_meta, [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self._vmops.spawn(context, instance, image_meta, injected_files, [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] vm_ref = self.build_virtual_machine(instance, [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] vif_infos = vmwarevif.get_vif_info(self._session, [ 814.040671] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] for vif in network_info: [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self._sync_wrapper(fn, *args, **kwargs) [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.wait() [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self[:] = self._gt.wait() [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self._exit_event.wait() [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] result = hub.switch() [ 814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
814.041385] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return self.greenlet.switch() [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] result = function(*args, **kwargs) [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] return func(*args, **kwargs) [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise e [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] nwinfo = self.network_api.allocate_for_instance( [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] created_port_ids = self._update_ports_for_instance( [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] with excutils.save_and_reraise_exception(): [ 814.041808] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] self.force_reraise() [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise self.value [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] updated_port = self._update_port( [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] _ensure_no_port_binding_failure(port) [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] raise exception.PortBindingFailed(port_id=port['id']) [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] nova.exception.PortBindingFailed: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. [ 814.042183] env[61594]: ERROR nova.compute.manager [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] [ 814.042625] env[61594]: DEBUG nova.compute.utils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 814.042625] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Build of instance 4ad1a310-5786-4bb6-87ff-72069f692eff was re-scheduled: Binding failed for port c030586c-57a9-4ed0-bfb3-955b0961e165, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 814.042625] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 814.042625] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.042887] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.042887] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 814.042887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 3c4cfc5ea53d4f37a952b5a83a37f3a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.053077] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] 
Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f7d0db3d-9247-409a-bf1e-4b53c2368ddc could not be found. [ 814.053077] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 814.053077] env[61594]: INFO nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 814.053203] env[61594]: DEBUG oslo.service.loopingcall [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.053355] env[61594]: DEBUG nova.compute.manager [-] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 814.053416] env[61594]: DEBUG nova.network.neutron [-] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.056484] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.056931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 484f41e23c9f46f4b1344da796fcaec2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.057958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c4cfc5ea53d4f37a952b5a83a37f3a0 [ 814.067998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 484f41e23c9f46f4b1344da796fcaec2 [ 814.067998] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Releasing lock "refresh_cache-500259b0-a57d-43ff-9c88-46190b6a3a10" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.067998] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 814.067998] env[61594]: DEBUG nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 814.067998] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.129263] env[61594]: DEBUG nova.network.neutron [-] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.129814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 50f756272bf24a0bbaccb7f6840f5612 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.142696] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.142696] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50f756272bf24a0bbaccb7f6840f5612 [ 814.143973] env[61594]: DEBUG nova.network.neutron [-] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.143973] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 20d88e8c4b884a8587a577fe932c5d24 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.157224] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20d88e8c4b884a8587a577fe932c5d24 [ 814.157773] env[61594]: INFO nova.compute.manager [-] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Took 0.10 seconds to deallocate network for instance. 
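The repeated "Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be" entries report the same inventory throughout this window. Placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, with max_unit capping any single allocation; the short sketch below simply applies that formula to the figures in the log (the loop itself is a hypothetical helper, not part of Nova).

# Apply placement's capacity formula to the inventory reported above for
# provider f0ff3a26-85e8-47dd-b241-86a582e8d4be.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # max_unit bounds how much of the class one allocation may consume.
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")

# Expected output:
#   VCPU: capacity=192, per-allocation cap=16
#   MEMORY_MB: capacity=196078, per-allocation cap=65530
#   DISK_GB: capacity=400, per-allocation cap=139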
[ 814.160534] env[61594]: DEBUG nova.compute.claims [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 814.160649] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.160813] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.164704] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 0873adae2ea24636bf30c274456ade5a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.169134] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.169134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg b10a33b1fcf745fe9222554bc90e299e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.184845] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b10a33b1fcf745fe9222554bc90e299e [ 814.185508] env[61594]: DEBUG nova.network.neutron [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.186028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg a40c7098d326445689ab3d05e780ac4a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.204112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a40c7098d326445689ab3d05e780ac4a [ 814.207970] env[61594]: INFO nova.compute.manager [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] [instance: 500259b0-a57d-43ff-9c88-46190b6a3a10] Took 0.14 seconds to deallocate network for instance. 
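The compute_resources lock accounting above ("Acquiring lock ... by ... abort_instance_claim", "acquired ... waited", '"released" ... held') is emitted by oslo.concurrency's lock wrapper, per the lockutils.py file/line references in the entries themselves. A minimal usage sketch of that decorator follows; the decorated function is illustrative, not Nova's resource tracker.

import time

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def abort_claim():
    # While this runs, other callers synchronized on 'compute_resources'
    # block; with debug logging enabled, lockutils records how long each
    # caller waited for the lock and how long it was held, as seen above.
    time.sleep(0.1)

abort_claim()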
[ 814.208474] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 57b60f08d6c5471aa02a56bed9ef0c9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.226210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0873adae2ea24636bf30c274456ade5a [ 814.266522] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57b60f08d6c5471aa02a56bed9ef0c9f [ 814.272779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 28d6bd2eadc9461aa6474a17628459c2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.327329] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28d6bd2eadc9461aa6474a17628459c2 [ 814.360188] env[61594]: INFO nova.scheduler.client.report [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Deleted allocations for instance 500259b0-a57d-43ff-9c88-46190b6a3a10 [ 814.376216] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Expecting reply to msg 18e758b4a161483d9cefae0e8f47e043 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.382072] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e504542-4efa-40e5-b24d-00e557f4ad04 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.390225] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c4dc498-ef8f-4a2e-a036-05b23bbebe0b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.425040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18e758b4a161483d9cefae0e8f47e043 [ 814.426040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-60532300-c080-4475-a503-9720cbbac502 tempest-ServerPasswordTestJSON-1155626657 tempest-ServerPasswordTestJSON-1155626657-project-member] Lock "500259b0-a57d-43ff-9c88-46190b6a3a10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.693s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.426752] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b13119-f055-4754-b9fc-8825622891cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.435265] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37326e03-0a2b-4267-a32a-d7b79e71a754 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.450897] env[61594]: DEBUG nova.compute.provider_tree [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not 
changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.451580] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 40748a734d6e49f1a7820ae704085456 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.465964] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40748a734d6e49f1a7820ae704085456 [ 814.466905] env[61594]: DEBUG nova.scheduler.client.report [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 814.469253] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 910b63b360a34a80a151317fc5d4fb09 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.489152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910b63b360a34a80a151317fc5d4fb09 [ 814.489152] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.489152] env[61594]: ERROR nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. 
[ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Traceback (most recent call last): [ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.driver.spawn(context, instance, image_meta, [ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 814.489152] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] vm_ref = self.build_virtual_machine(instance, [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] vif_infos = vmwarevif.get_vif_info(self._session, [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] for vif in network_info: [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self._sync_wrapper(fn, *args, **kwargs) [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.wait() [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self[:] = self._gt.wait() [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 814.489510] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self._exit_event.wait() [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] result = hub.switch() [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return self.greenlet.switch() [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] result = function(*args, **kwargs) [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] return func(*args, **kwargs) [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise e [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] nwinfo = self.network_api.allocate_for_instance( [ 814.489864] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] created_port_ids = self._update_ports_for_instance( [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] with excutils.save_and_reraise_exception(): [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] self.force_reraise() [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise self.value [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] updated_port = self._update_port( [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] _ensure_no_port_binding_failure(port) [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 814.490242] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] raise exception.PortBindingFailed(port_id=port['id']) [ 814.490609] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] nova.exception.PortBindingFailed: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. [ 814.490609] env[61594]: ERROR nova.compute.manager [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] [ 814.490609] env[61594]: DEBUG nova.compute.utils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 814.491792] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Build of instance f7d0db3d-9247-409a-bf1e-4b53c2368ddc was re-scheduled: Binding failed for port f32cbb9a-7422-4f1e-b576-b28aa8cc4371, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 814.492237] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 814.492465] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 814.492766] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquired lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.492766] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 814.493189] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 64400308ca6b4cdaa77abbb5e2d45be0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.506576] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64400308ca6b4cdaa77abbb5e2d45be0 [ 814.545182] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 
tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.545718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg bca301bc2f0944f5a3252463e3bf25b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.558141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bca301bc2f0944f5a3252463e3bf25b1 [ 814.558844] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-4ad1a310-5786-4bb6-87ff-72069f692eff" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.559067] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 814.559255] env[61594]: DEBUG nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 814.563021] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.607176] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.607779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 14457b5ae158437889b6de48770ebad8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.617705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14457b5ae158437889b6de48770ebad8 [ 814.618306] env[61594]: DEBUG nova.network.neutron [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.618809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 8e1be41e7dfc4264b99f966b6ed49171 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.631584] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e1be41e7dfc4264b99f966b6ed49171 [ 814.632235] env[61594]: INFO nova.compute.manager [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 4ad1a310-5786-4bb6-87ff-72069f692eff] Took 0.07 seconds to deallocate network for instance. [ 814.633889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 72be2de198674d2cab1133b357689ec3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.654014] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.690568] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72be2de198674d2cab1133b357689ec3 [ 814.693622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg ec78913acc8144a4813936af23ea28b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.735754] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec78913acc8144a4813936af23ea28b3 [ 814.766283] env[61594]: INFO nova.scheduler.client.report [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Deleted allocations for instance 4ad1a310-5786-4bb6-87ff-72069f692eff [ 814.772795] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg bc852da9130d4465ac2a6cdccc5dc3e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.803915] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc852da9130d4465ac2a6cdccc5dc3e1 [ 814.804605] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a2687786-2462-4cea-aa9e-693ff5d747d9 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "4ad1a310-5786-4bb6-87ff-72069f692eff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.430s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 814.813461] env[61594]: DEBUG nova.network.neutron [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.813791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg f7d84154bfa649bb82d3542679583f2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.832676] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f7d84154bfa649bb82d3542679583f2a [ 814.833384] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 814.833783] env[61594]: DEBUG nova.compute.manager [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 814.833980] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 814.834533] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f169948c-a57e-49d6-bfae-ba5bf057fac6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.845166] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1347755-b068-4afb-9ca2-972b6268d0d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.871048] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3 could not be found. [ 814.871550] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 814.871661] env[61594]: INFO nova.compute.manager [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 814.871925] env[61594]: DEBUG oslo.service.loopingcall [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 814.872215] env[61594]: DEBUG nova.compute.manager [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 814.872316] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 814.880349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b8a5840d316f42c8abe0b60adf885458 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.893635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8a5840d316f42c8abe0b60adf885458 [ 814.961665] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 814.962213] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 30322330ce9f4c0baea2a690000d4d1e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.970211] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30322330ce9f4c0baea2a690000d4d1e [ 814.970680] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.971251] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 79fbc15bd10e4cfcb1aa0b9137e454cb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 814.980129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79fbc15bd10e4cfcb1aa0b9137e454cb [ 814.980408] env[61594]: INFO nova.compute.manager [-] [instance: 7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3] Took 0.11 seconds to deallocate network for instance. [ 814.985947] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a28f01d516474f8dbd8dafd105721a0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.024012] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a28f01d516474f8dbd8dafd105721a0c [ 815.038590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d5a27f2827f740b3b3284f15a5dc63e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.092836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5a27f2827f740b3b3284f15a5dc63e1 [ 815.096558] env[61594]: DEBUG oslo_concurrency.lockutils [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "7cf10fb8-e3db-44fb-b9c3-4b9bb92d18e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.329s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.097141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-28c5ee6d-f17c-4b15-9060-0e542acc2c1f tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 7ee406e32a2f4583b8fa7de8202ce1e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.107600] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ee406e32a2f4583b8fa7de8202ce1e9 [ 815.360413] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.360413] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 
fd1f145d3fbe48f68f7d164b52456289 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.369123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd1f145d3fbe48f68f7d164b52456289 [ 815.369123] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Releasing lock "refresh_cache-f7d0db3d-9247-409a-bf1e-4b53c2368ddc" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 815.369123] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 815.369123] env[61594]: DEBUG nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 815.369123] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 815.442996] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 815.443569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 2723e8d290e94804bd761c72ed417706 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.451372] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2723e8d290e94804bd761c72ed417706 [ 815.453287] env[61594]: DEBUG nova.network.neutron [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.453658] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 70ae45f02f4d4708aea1c37f58c7745b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.467986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70ae45f02f4d4708aea1c37f58c7745b [ 815.468980] env[61594]: INFO nova.compute.manager [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: f7d0db3d-9247-409a-bf1e-4b53c2368ddc] Took 0.10 seconds to deallocate network for instance. [ 815.471970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg a61984a173614d1f8d2b51ee62093301 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.520150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a61984a173614d1f8d2b51ee62093301 [ 815.523321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 179f754ca49b430a8a4f42c99b6180c2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.571590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 179f754ca49b430a8a4f42c99b6180c2 [ 815.602709] env[61594]: INFO nova.scheduler.client.report [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Deleted allocations for instance f7d0db3d-9247-409a-bf1e-4b53c2368ddc [ 815.613017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 28acdcac705e495f9a80676e9d7c3f74 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 815.629275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28acdcac705e495f9a80676e9d7c3f74 [ 815.629275] env[61594]: DEBUG oslo_concurrency.lockutils [None req-30746edb-2bbc-445d-bd61-e88a0aad8845 tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "f7d0db3d-9247-409a-bf1e-4b53c2368ddc" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.855s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 816.876786] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "9c6c9639-614f-4ffa-a6db-e70ef37b9954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.877132] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "9c6c9639-614f-4ffa-a6db-e70ef37b9954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.877493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg ffa7c02927454248af9747399c31f0bc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 816.898460] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ffa7c02927454248af9747399c31f0bc [ 816.900530] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 816.900877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg b5729be2fefe425a8fb3bfc1843d861e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 816.923579] env[61594]: ERROR nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. 
[ 816.923579] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 816.923579] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.923579] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.923579] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.923579] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.923579] env[61594]: ERROR nova.compute.manager raise self.value [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.923579] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 816.923579] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.923579] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 816.924158] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 816.924158] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 816.924158] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. 
[ 816.924158] env[61594]: ERROR nova.compute.manager [ 816.924158] env[61594]: Traceback (most recent call last): [ 816.924158] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 816.924158] env[61594]: listener.cb(fileno) [ 816.924158] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 816.924158] env[61594]: result = function(*args, **kwargs) [ 816.924158] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 816.924158] env[61594]: return func(*args, **kwargs) [ 816.924158] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 816.924158] env[61594]: raise e [ 816.924158] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 816.924158] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 816.924158] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.924158] env[61594]: created_port_ids = self._update_ports_for_instance( [ 816.924158] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.924158] env[61594]: with excutils.save_and_reraise_exception(): [ 816.924158] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.924158] env[61594]: self.force_reraise() [ 816.924158] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.924158] env[61594]: raise self.value [ 816.924158] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.924158] env[61594]: updated_port = self._update_port( [ 816.924158] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.924158] env[61594]: _ensure_no_port_binding_failure(port) [ 816.924158] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 816.924158] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 816.924946] env[61594]: nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. [ 816.924946] env[61594]: Removing descriptor: 20 [ 816.924946] env[61594]: ERROR nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. 
[ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Traceback (most recent call last): [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] yield resources [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.driver.spawn(context, instance, image_meta, [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.924946] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] vm_ref = self.build_virtual_machine(instance, [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] for vif in network_info: [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self._sync_wrapper(fn, *args, **kwargs) [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.wait() [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self[:] = self._gt.wait() [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self._exit_event.wait() [ 816.925339] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 816.925788] env[61594]: ERROR 
nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] result = hub.switch() [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self.greenlet.switch() [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] result = function(*args, **kwargs) [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return func(*args, **kwargs) [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise e [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] nwinfo = self.network_api.allocate_for_instance( [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.925788] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] created_port_ids = self._update_ports_for_instance( [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] with excutils.save_and_reraise_exception(): [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.force_reraise() [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise self.value [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] updated_port = self._update_port( [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.926200] 
env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] _ensure_no_port_binding_failure(port) [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 816.926200] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise exception.PortBindingFailed(port_id=port['id']) [ 816.926539] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. [ 816.926539] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] [ 816.926539] env[61594]: INFO nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Terminating instance [ 816.928949] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquiring lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.929246] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquired lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.929520] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.930154] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg e6b3f6da63a94110bb9a318aab2c03ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 816.942118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6b3f6da63a94110bb9a318aab2c03ac [ 816.956097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b5729be2fefe425a8fb3bfc1843d861e [ 816.966120] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.981736] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.982029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.983487] env[61594]: INFO nova.compute.claims [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.985800] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 87755e10460e4fe883f03cd4001904e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.028175] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87755e10460e4fe883f03cd4001904e0 [ 817.030280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 4db46da88ea2477c8b7f4cd054026717 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.038936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4db46da88ea2477c8b7f4cd054026717 [ 817.152543] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e08f85d0-386a-4846-b1d3-5eb8424ffcdb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.162215] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.162215] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg a8adf1ccaa4c4b86ad15728998f7b690 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.164728] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43dd900-44d9-4313-b9d8-ac60a2a71793 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.201095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8adf1ccaa4c4b86ad15728998f7b690 [ 817.201956] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Releasing lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.202356] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 817.202542] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 817.206019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0685fc1d-833f-47bc-9bff-bbf794021df1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.206881] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-269916b9-12a9-4cd3-a8d9-c03bf6c47e20 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.221745] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2d222a3-1f16-48b2-b7ea-a324337524e4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.229347] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fc7294-68e0-4634-b915-e693da0c2ba1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.250519] env[61594]: DEBUG nova.compute.provider_tree [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.251039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 406419f0b68a4e3eab345196f4ecbdbb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.256327] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 362c5924-06e1-4385-a1f8-6b0556f7ba8b could not be found. 
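[editorial sketch] The warning just above, together with the "Instance destroyed" and "Took 0.05 seconds to destroy the instance on the hypervisor" records that follow, shows the destroy path treating a VM that is already absent from the backend as successfully torn down. A minimal sketch of that tolerant-teardown pattern is below; the function and callable names are illustrative stand-ins, not Nova's actual helpers, and only the behaviour (catch InstanceNotFound, log a warning, continue) is taken from the log.

    # Hedged sketch of the teardown pattern implied by the log: a missing backend VM
    # is logged as a warning and treated as already destroyed, so cleanup proceeds.
    import logging

    LOG = logging.getLogger(__name__)

    class InstanceNotFound(Exception):
        pass

    def destroy_instance(instance_uuid, lookup_vm_ref, destroy_vm):
        """Destroy a VM, tolerating the case where it no longer exists on the backend.

        lookup_vm_ref and destroy_vm are illustrative callables standing in for the
        hypervisor-specific operations.
        """
        try:
            vm_ref = lookup_vm_ref(instance_uuid)
        except InstanceNotFound as exc:
            LOG.warning("Instance does not exist on backend: %s", exc)
            return  # nothing left to tear down on the hypervisor
        destroy_vm(vm_ref)

    if __name__ == '__main__':
        def lookup_vm_ref(uuid):
            raise InstanceNotFound(f"Instance {uuid} could not be found.")

        logging.basicConfig(level=logging.WARNING)
        destroy_instance('362c5924-06e1-4385-a1f8-6b0556f7ba8b',
                         lookup_vm_ref, destroy_vm=lambda ref: None)

The log then records the instance as destroyed and moves on to network deallocation.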
[ 817.256566] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 817.256759] env[61594]: INFO nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 817.257016] env[61594]: DEBUG oslo.service.loopingcall [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.257726] env[61594]: DEBUG nova.compute.manager [-] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 817.257829] env[61594]: DEBUG nova.network.neutron [-] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 817.265159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 406419f0b68a4e3eab345196f4ecbdbb [ 817.266376] env[61594]: DEBUG nova.scheduler.client.report [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 817.268618] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 23c04960c48f4a64ac1953ff7bb5215e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.282284] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23c04960c48f4a64ac1953ff7bb5215e [ 817.283154] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.301s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.283636] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 817.288016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg c8ea1110fe944fa983b46bdadf1b8d35 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.317999] env[61594]: DEBUG nova.network.neutron [-] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.318585] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 895f4262cb5e40c699d9b844dd58e8e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.324560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8ea1110fe944fa983b46bdadf1b8d35 [ 817.326266] env[61594]: DEBUG nova.compute.utils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 817.326854] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg efa5b1e1258b4697802b9eb986ba036e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.327913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 895f4262cb5e40c699d9b844dd58e8e9 [ 817.328340] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 817.328537] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 817.335017] env[61594]: DEBUG nova.network.neutron [-] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.335017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 12c4a97c71544568ab2441b00d68cdfe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.345693] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12c4a97c71544568ab2441b00d68cdfe [ 817.346208] env[61594]: INFO nova.compute.manager [-] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Took 0.09 seconds to deallocate network for instance. [ 817.348245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efa5b1e1258b4697802b9eb986ba036e [ 817.349193] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 817.351299] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 1911c05ebf0a4b719b3c8e155ec0a186 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.352386] env[61594]: DEBUG nova.compute.claims [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 817.352565] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.352778] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.354726] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 2a6a474ddf5a4ebf97800dfe7a25b51d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.398325] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1911c05ebf0a4b719b3c8e155ec0a186 [ 817.399067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a6a474ddf5a4ebf97800dfe7a25b51d [ 817.403683] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 429b834e70ab42498440e6a0cf190609 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.460373] env[61594]: DEBUG nova.policy [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7ce2159a0ccb46c89a7574d04142e926', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dbc2efe50ed4ae5a5f0cf6f492a20bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 817.465322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 429b834e70ab42498440e6a0cf190609 [ 817.466964] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Start 
spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 817.504612] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 817.504612] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 817.504803] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 817.504841] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 817.504983] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 817.506643] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 817.506643] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 817.506643] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 817.506643] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 
tempest-ImagesTestJSON-873357144-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 817.506643] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 817.507308] env[61594]: DEBUG nova.virt.hardware [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 817.508455] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbdbc03-1330-441a-8d17-4407689875a3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.523388] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c460e7b-7c91-4863-8e5d-37b479c07600 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.548563] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc06f0b-9166-4b98-b102-d9ce02ac968b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.552168] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "bbd46264-8992-4e56-9896-675500fe587b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.553385] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "bbd46264-8992-4e56-9896-675500fe587b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.553385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a4cacef1136e423f89cf08e1b76fd86d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.559664] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e821491-2ea7-4ebd-af64-2246a701dafd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.596831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4cacef1136e423f89cf08e1b76fd86d [ 817.598287] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b46d6a-8fd4-46d4-a271-69ec29a94811 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.602025] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 817.602254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 884d5cb016d54017ba0a3169f3c10cc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.608593] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7790cd77-7f2e-4688-b837-885e2e01c4a8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.625240] env[61594]: DEBUG nova.compute.provider_tree [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.625240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 67404af9e51d473cb5dbea67e1e77776 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.635019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67404af9e51d473cb5dbea67e1e77776 [ 817.638166] env[61594]: DEBUG nova.scheduler.client.report [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 817.638271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg dd67e58f64c4466b830d5bcd6f2c7b80 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.656230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd67e58f64c4466b830d5bcd6f2c7b80 [ 817.657216] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.304s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.659048] env[61594]: ERROR 
nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Traceback (most recent call last): [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.driver.spawn(context, instance, image_meta, [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] vm_ref = self.build_virtual_machine(instance, [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] vif_infos = vmwarevif.get_vif_info(self._session, [ 817.659048] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] for vif in network_info: [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self._sync_wrapper(fn, *args, **kwargs) [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.wait() [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self[:] = self._gt.wait() [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self._exit_event.wait() [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] result = hub.switch() [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 817.659485] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return self.greenlet.switch() [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] result = function(*args, **kwargs) [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] return func(*args, **kwargs) [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise e [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] nwinfo = self.network_api.allocate_for_instance( [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] created_port_ids = self._update_ports_for_instance( [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] with excutils.save_and_reraise_exception(): [ 817.659844] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] self.force_reraise() [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise self.value [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] updated_port = self._update_port( [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 
362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] _ensure_no_port_binding_failure(port) [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] raise exception.PortBindingFailed(port_id=port['id']) [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] nova.exception.PortBindingFailed: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. [ 817.660232] env[61594]: ERROR nova.compute.manager [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] [ 817.660557] env[61594]: DEBUG nova.compute.utils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 817.664348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 884d5cb016d54017ba0a3169f3c10cc6 [ 817.664828] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Build of instance 362c5924-06e1-4385-a1f8-6b0556f7ba8b was re-scheduled: Binding failed for port b183ba56-040d-4b47-8d9f-2483248eb675, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 817.665307] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 817.665592] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquiring lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 817.665751] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Acquired lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.665913] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 817.666411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 347b61c53d394d30999124607e1b7893 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.677929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 347b61c53d394d30999124607e1b7893 [ 817.690542] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 817.690810] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 817.692386] env[61594]: INFO nova.compute.claims [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 817.694267] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 18e6a67090af4ee18ac42d932124323f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.734322] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 18e6a67090af4ee18ac42d932124323f [ 817.736334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 74408991890249908cd5c0117a18a2de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.737783] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.749036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74408991890249908cd5c0117a18a2de [ 817.859242] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc712941-5951-41e1-8ba2-93735a78ad6c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.868478] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8fa942-53c1-43e1-9db0-329e9b5f9562 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.902751] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe230b55-4f09-4389-be7b-d1790c86538d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.911329] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17241f21-7e5c-4a05-8713-732b4190d86f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.926309] env[61594]: DEBUG nova.compute.provider_tree [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.926815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c718fd4a79694fc49f8404c20145721a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.946032] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c718fd4a79694fc49f8404c20145721a [ 817.946032] env[61594]: DEBUG nova.scheduler.client.report [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 817.947466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 0fc4a38ec403442582ce84dc3dfd0880 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.962966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0fc4a38ec403442582ce84dc3dfd0880 [ 817.963615] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.272s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 817.964399] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 817.965972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 4004bb3506ab42688e2ec4ebc5282ed1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 817.997344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4004bb3506ab42688e2ec4ebc5282ed1 [ 818.002229] env[61594]: DEBUG nova.compute.utils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.002229] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cf17ce61234344e4aeed4f473a088220 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.003844] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 818.004026] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 818.015706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf17ce61234344e4aeed4f473a088220 [ 818.016598] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 818.018038] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 61ac81ddceda436d877b3cc6c236a574 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.050106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61ac81ddceda436d877b3cc6c236a574 [ 818.054353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg fb8a9c67b3a440d49a2ceac5bd170bc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.090556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb8a9c67b3a440d49a2ceac5bd170bc4 [ 818.092173] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 818.122124] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 818.122372] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 818.122601] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.122750] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 818.122841] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.122988] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 818.123214] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 818.123372] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 818.123532] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 818.123698] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 818.123871] env[61594]: DEBUG nova.virt.hardware [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 818.124765] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c32cb79-5dc8-4921-9d2d-b644c7ccec48 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.136382] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8154d38d-a8c1-42e1-836f-1f1ffedb6202 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.521728] env[61594]: DEBUG nova.policy [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 818.532948] env[61594]: DEBUG nova.network.neutron [None 
req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.533605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 77abc242141e498d941e4feb1e388c9d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.548807] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77abc242141e498d941e4feb1e388c9d [ 818.549735] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Releasing lock "refresh_cache-362c5924-06e1-4385-a1f8-6b0556f7ba8b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 818.553121] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 818.553350] env[61594]: DEBUG nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 818.553526] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 818.590507] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Successfully created port: 55011e89-cff6-4abb-9a03-2344a1163928 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.654582] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.655198] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg ed1942f37ff546ba9c2540ab2bb5bde2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.669754] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed1942f37ff546ba9c2540ab2bb5bde2 [ 818.670434] env[61594]: DEBUG nova.network.neutron [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.670959] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg a2c6ea3ec61448009612d50f49a628da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.686801] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2c6ea3ec61448009612d50f49a628da [ 818.688267] env[61594]: INFO nova.compute.manager [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] [instance: 362c5924-06e1-4385-a1f8-6b0556f7ba8b] Took 0.13 seconds to deallocate network for instance. [ 818.690546] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 0260e7eca2224ca488365dfc6bb0d3b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.756380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0260e7eca2224ca488365dfc6bb0d3b7 [ 818.756380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg a400c2ec9f7640bdaa0faf5395b7a5cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.793028] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a400c2ec9f7640bdaa0faf5395b7a5cd [ 818.827657] env[61594]: INFO nova.scheduler.client.report [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Deleted allocations for instance 362c5924-06e1-4385-a1f8-6b0556f7ba8b [ 818.838809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] Expecting reply to msg 7f93bf75c78f4a27b0511908c34e9cdf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 818.857367] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7f93bf75c78f4a27b0511908c34e9cdf [ 818.858026] env[61594]: DEBUG oslo_concurrency.lockutils [None req-604e9287-9bb1-43fc-976a-96f46dd10c4c tempest-ServersTestFqdnHostnames-1549182594 tempest-ServersTestFqdnHostnames-1549182594-project-member] 
Lock "362c5924-06e1-4385-a1f8-6b0556f7ba8b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 8.044s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.124663] env[61594]: ERROR nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. [ 819.124663] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 819.124663] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.124663] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.124663] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.124663] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.124663] env[61594]: ERROR nova.compute.manager raise self.value [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.124663] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 819.124663] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.124663] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 819.125631] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.125631] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 819.125631] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. 
[ 819.125631] env[61594]: ERROR nova.compute.manager [ 819.125631] env[61594]: Traceback (most recent call last): [ 819.125631] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 819.125631] env[61594]: listener.cb(fileno) [ 819.125631] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 819.125631] env[61594]: result = function(*args, **kwargs) [ 819.125631] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 819.125631] env[61594]: return func(*args, **kwargs) [ 819.125631] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 819.125631] env[61594]: raise e [ 819.125631] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 819.125631] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 819.125631] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.125631] env[61594]: created_port_ids = self._update_ports_for_instance( [ 819.125631] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.125631] env[61594]: with excutils.save_and_reraise_exception(): [ 819.125631] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.125631] env[61594]: self.force_reraise() [ 819.125631] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.125631] env[61594]: raise self.value [ 819.125631] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.125631] env[61594]: updated_port = self._update_port( [ 819.125631] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.125631] env[61594]: _ensure_no_port_binding_failure(port) [ 819.125631] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.125631] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 819.127988] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. [ 819.127988] env[61594]: Removing descriptor: 24 [ 819.127988] env[61594]: ERROR nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. 
[ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Traceback (most recent call last): [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] yield resources [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.driver.spawn(context, instance, image_meta, [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.127988] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] vm_ref = self.build_virtual_machine(instance, [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] for vif in network_info: [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self._sync_wrapper(fn, *args, **kwargs) [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.wait() [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self[:] = self._gt.wait() [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self._exit_event.wait() [ 819.129074] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 819.133369] env[61594]: ERROR 
nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] result = hub.switch() [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self.greenlet.switch() [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] result = function(*args, **kwargs) [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return func(*args, **kwargs) [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise e [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] nwinfo = self.network_api.allocate_for_instance( [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.133369] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] created_port_ids = self._update_ports_for_instance( [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] with excutils.save_and_reraise_exception(): [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.force_reraise() [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise self.value [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] updated_port = self._update_port( [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.133820] 
env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] _ensure_no_port_binding_failure(port) [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.133820] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise exception.PortBindingFailed(port_id=port['id']) [ 819.134153] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. [ 819.134153] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] [ 819.134153] env[61594]: INFO nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Terminating instance [ 819.134153] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.134153] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.134153] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.134338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 77fd989c6a5b46c99fc56bb5fbada149 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 819.143413] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77fd989c6a5b46c99fc56bb5fbada149 [ 819.216118] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 819.944156] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.944566] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg dbb57fa121a94736be90905871326aa9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 819.964590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbb57fa121a94736be90905871326aa9 [ 819.968666] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 819.970686] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 819.970686] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 819.970686] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6d7d635-3e40-4d9d-828c-fd299bcdd6e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.979692] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805f395b-e04f-41b6-ad6e-8b35d3af97d8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.007012] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a could not be found. [ 820.007115] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 820.007348] env[61594]: INFO nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 820.007711] env[61594]: DEBUG oslo.service.loopingcall [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 820.008557] env[61594]: DEBUG nova.compute.manager [-] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 820.008716] env[61594]: DEBUG nova.network.neutron [-] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 820.094159] env[61594]: DEBUG nova.network.neutron [-] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.094703] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 623a6534b2c046379ca250ee184b35f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.114972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 623a6534b2c046379ca250ee184b35f3 [ 820.115478] env[61594]: DEBUG nova.network.neutron [-] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.116127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg d310f0fa100f400f8e59e7b93393db28 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.133675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d310f0fa100f400f8e59e7b93393db28 [ 820.134370] env[61594]: INFO nova.compute.manager [-] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Took 0.13 seconds to deallocate network for instance. 
[ 820.137236] env[61594]: DEBUG nova.compute.claims [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 820.137236] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.137526] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.142323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 891f23dcd27e4d69b578aeb984f82818 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.203574] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 891f23dcd27e4d69b578aeb984f82818 [ 820.227312] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Successfully created port: fbd834cc-c82f-429f-889a-1b197f4186b6 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 820.333018] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ba9daa8-9d8e-423c-ba95-143aff3f5b05 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.340489] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "848010fd-76ed-43d9-8d74-62b09062a2b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.341039] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "848010fd-76ed-43d9-8d74-62b09062a2b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.341668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 74aa35c003aa4321bf714f1924b5cd38 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.346237] env[61594]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb5a9dd-aa21-4500-8a3c-f8c931c67fd5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.386259] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 74aa35c003aa4321bf714f1924b5cd38 [ 820.387454] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ad07b0-a458-412e-b65e-efcbc1a0635c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.393727] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 820.395852] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg afec7176a39c4dc98b23571e40a8a533 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.404300] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25335c97-2356-4199-86e9-a4b76dadf013 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.422584] env[61594]: DEBUG nova.compute.provider_tree [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.423031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 49362e07145441e1bf88119235ca7cdc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.439264] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquiring lock "7cacf5b7-500d-493c-a86a-2a8a03cc6eac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.439513] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "7cacf5b7-500d-493c-a86a-2a8a03cc6eac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.439979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 07d14eff1b5949abb9f379e6d307a96f in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.445027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 49362e07145441e1bf88119235ca7cdc [ 820.446183] env[61594]: DEBUG nova.scheduler.client.report [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 820.449258] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4a88d73ce3f54f1bbd331d9bc78ff67e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.466479] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a88d73ce3f54f1bbd331d9bc78ff67e [ 820.467365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.330s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.468096] env[61594]: ERROR nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. 
[ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Traceback (most recent call last): [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.driver.spawn(context, instance, image_meta, [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] vm_ref = self.build_virtual_machine(instance, [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.468096] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] for vif in network_info: [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self._sync_wrapper(fn, *args, **kwargs) [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.wait() [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self[:] = self._gt.wait() [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self._exit_event.wait() [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] result = hub.switch() [ 820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
820.468507] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return self.greenlet.switch() [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] result = function(*args, **kwargs) [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] return func(*args, **kwargs) [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise e [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] nwinfo = self.network_api.allocate_for_instance( [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] created_port_ids = self._update_ports_for_instance( [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] with excutils.save_and_reraise_exception(): [ 820.468913] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] self.force_reraise() [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise self.value [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] updated_port = self._update_port( [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] _ensure_no_port_binding_failure(port) [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] raise exception.PortBindingFailed(port_id=port['id']) [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] nova.exception.PortBindingFailed: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. [ 820.469428] env[61594]: ERROR nova.compute.manager [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] [ 820.469906] env[61594]: DEBUG nova.compute.utils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 820.475319] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afec7176a39c4dc98b23571e40a8a533 [ 820.475620] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07d14eff1b5949abb9f379e6d307a96f [ 820.478390] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 820.480595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 842155d94ae348bab08ac35ccae8fb46 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.484157] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Build of instance 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a was re-scheduled: Binding failed for port 24dbe2dd-6847-4ffd-9ba1-9ea5207dd415, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 820.484640] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 820.487682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.487682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.487682] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 820.487682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8d9955ac33b74853837da191752af98e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.503485] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d9955ac33b74853837da191752af98e [ 820.527135] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.527135] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.531088] env[61594]: INFO nova.compute.claims [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.532904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg f9f961e90d2240c1a4163d45bf07058a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.537615] env[61594]: DEBUG nova.network.neutron [None 
req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.558296] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 842155d94ae348bab08ac35ccae8fb46 [ 820.583358] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.589031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9f961e90d2240c1a4163d45bf07058a [ 820.590936] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 8916cf0bc0b14eae9bf067fde07ea792 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.604149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8916cf0bc0b14eae9bf067fde07ea792 [ 820.753986] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c358ab51-2f56-429a-8e69-1aca081414e0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.763420] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239a43e4-1ca4-48b1-8048-7e127837b8aa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.801909] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599da47c-75ea-4bf3-8a57-d11276586992 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.809523] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ff6ce2-7e4b-4833-b317-79c153cb9210 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.827605] env[61594]: DEBUG nova.compute.provider_tree [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.828047] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg f8c9ad8e8e354fab988290ae04fa126b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.842416] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f8c9ad8e8e354fab988290ae04fa126b [ 820.844286] env[61594]: DEBUG nova.scheduler.client.report [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 
tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 820.847307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 6f7f5f5683684f0883eb30dc7fe7eb65 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.862205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f7f5f5683684f0883eb30dc7fe7eb65 [ 820.863145] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 820.864141] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 820.866490] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 4503498c8d11485da264c86ea20a7ee1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.867626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.284s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.869325] env[61594]: INFO nova.compute.claims [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 820.871484] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg a5c74cc83cb84ce1aebf15bdc7ca3e2c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.906114] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5c74cc83cb84ce1aebf15bdc7ca3e2c [ 820.908181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 1e2107700a2041d7a7ac048060d9712e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.942158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4503498c8d11485da264c86ea20a7ee1 [ 820.943654] env[61594]: DEBUG nova.compute.utils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 820.944268] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 0ac09f288e934869b5bc8a2f47a4d150 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 820.946152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e2107700a2041d7a7ac048060d9712e [ 820.946279] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 820.946498] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 820.964285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ac09f288e934869b5bc8a2f47a4d150 [ 820.964285] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 820.966195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 3ca34be6b7fc43998ba01c8ed28f9f7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.012022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ca34be6b7fc43998ba01c8ed28f9f7e [ 821.014979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg f39ecb4876f940afb8a67091a405bdb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.050751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f39ecb4876f940afb8a67091a405bdb4 [ 821.053258] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 821.079935] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb8760b-9839-4953-8ecd-6c8933ce0cdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.086381] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 821.086622] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 821.086781] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.087051] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 821.087226] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.087392] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 821.087580] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 821.087741] env[61594]: DEBUG nova.virt.hardware [None 
req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 821.087906] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 821.088082] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 821.088256] env[61594]: DEBUG nova.virt.hardware [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.089317] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c07fd83-4996-4355-af5b-88970f9d1fcc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.098562] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c89ea8e4-ac78-4768-9bc5-36aee9af8602 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.102602] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8218e478-2846-4f32-9303-60842b34359a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.141627] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad07e0c-5065-4487-b7a8-5ca0d92493ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.148769] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ab3445-3261-4ffe-8cb5-1163ce20960b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.162238] env[61594]: DEBUG nova.compute.provider_tree [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 821.162674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg f890f8cd1a07427fb315c39ca2aed475 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.177159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f890f8cd1a07427fb315c39ca2aed475 [ 821.178996] env[61594]: 
DEBUG nova.scheduler.client.report [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 821.180313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg bf19dd121d8a4bfbae8277ede03e1104 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.191688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf19dd121d8a4bfbae8277ede03e1104 [ 821.192508] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.325s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.192983] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 821.194587] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 8fe72155464b427784cf7b78c199ed08 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.234091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8fe72155464b427784cf7b78c199ed08 [ 821.235235] env[61594]: DEBUG nova.compute.utils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 821.235814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 8605aacc33ab45799eb3d2d3b8f02eab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.236739] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 821.237162] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 821.249170] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8605aacc33ab45799eb3d2d3b8f02eab [ 821.249870] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 821.251530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg eba8a0f10d414cdea03b2fb33205a7bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.294024] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.294024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 385b8b3bd2ce4243b5a35a60f872e886 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.294024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eba8a0f10d414cdea03b2fb33205a7bf [ 821.296066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 15ad7c68c32f4fb2ac3f9f7fbda1144e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.303309] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 385b8b3bd2ce4243b5a35a60f872e886 [ 821.303309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.303309] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 821.303309] env[61594]: DEBUG nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 821.303309] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 821.326882] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15ad7c68c32f4fb2ac3f9f7fbda1144e [ 821.328014] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 821.351543] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 821.351810] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 821.351971] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.352193] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 821.352323] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 
tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.352470] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 821.352678] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 821.352839] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 821.353014] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 821.353534] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 821.353729] env[61594]: DEBUG nova.virt.hardware [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.354589] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3293ec71-6ca7-4bbd-8e20-a3afc44e6062 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.364451] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d14fb1-679b-4cf3-9d1e-b05688e4d2d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.396128] env[61594]: DEBUG nova.policy [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d6b88b1f4394a63b32be00fc9dcc761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '57787a709d744ea4a19a2cfb923d89d4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 821.455140] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 821.455903] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ca74786f936c4a73b94155cca9ce10a8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.463889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca74786f936c4a73b94155cca9ce10a8 [ 821.464460] env[61594]: DEBUG nova.network.neutron [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.464918] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 59a6a5ada49444a08aeec7c7750811a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.474075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59a6a5ada49444a08aeec7c7750811a2 [ 821.474663] env[61594]: INFO nova.compute.manager [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a] Took 0.17 seconds to deallocate network for instance. 
[ 821.476314] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ee16645b5b184180b2d91a6ee1caba27 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.519202] env[61594]: DEBUG nova.policy [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e5a10b199013411d80830a41a0abb711', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ae1347959e924d97b351fdd7c40c4777', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 821.528572] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ee16645b5b184180b2d91a6ee1caba27 [ 821.532653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 3791e5506ecb463cb7706bcb6be8818a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.574700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3791e5506ecb463cb7706bcb6be8818a [ 821.608408] env[61594]: INFO nova.scheduler.client.report [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance 8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a [ 821.617444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8c63eccba0b44d5ab4cc1f429d4a7af0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 821.636102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8c63eccba0b44d5ab4cc1f429d4a7af0 [ 821.636854] env[61594]: DEBUG oslo_concurrency.lockutils [None req-f366b3e9-58eb-46da-a72b-978eed57d136 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "8bf9e7dc-659d-46a3-8016-c09d0c3c0f5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.642s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.296904] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Successfully created port: 64a4ae94-4388-4b11-ab1e-f234740d4680 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.510465] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Successfully created port: 87f8c98f-4a27-42fc-b13f-d8e6af9b323a {{(pid=61594) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.453496] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "382b96fa-d807-4f96-b47e-784dfcd26437" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.454083] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "382b96fa-d807-4f96-b47e-784dfcd26437" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.454664] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 81730d0cee7d4501a8eae8fee50f515e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.465075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81730d0cee7d4501a8eae8fee50f515e [ 824.465722] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 824.467539] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg f94b8c05fd5946bab0eed6aad9fc598b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.513352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f94b8c05fd5946bab0eed6aad9fc598b [ 824.535764] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.535764] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.536678] env[61594]: INFO nova.compute.claims [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.539410] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg ef180f252a3e4c2e8257c095820d73ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.588031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef180f252a3e4c2e8257c095820d73ff [ 824.589057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 89dc8e77a8b947699ba992c2eb302b7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.600167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89dc8e77a8b947699ba992c2eb302b7e [ 824.724112] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23f70ecc-ac84-424a-aa85-7b12c81c0597 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.733538] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fca0c6-d629-42ee-8573-8043b19422a8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.771484] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-606aead9-da67-4c2b-9af6-5631ed74d57f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.779417] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f59d538-37f8-49b7-a7a7-e3e0dc820e72 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.794031] env[61594]: DEBUG nova.compute.provider_tree [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.794031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 6d1af13f88904bbba235ce18e60efeda in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.804110] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d1af13f88904bbba235ce18e60efeda [ 824.805491] env[61594]: DEBUG nova.scheduler.client.report [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 824.811026] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg fb96c677e2fc40b59e87e666ba64296e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.823532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb96c677e2fc40b59e87e666ba64296e [ 824.824410] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.290s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.824901] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 824.830091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 31ae50e8604648a69937df645a9d7034 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.874449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ae50e8604648a69937df645a9d7034 [ 824.875680] env[61594]: DEBUG nova.compute.utils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.879865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 9837777778c84e968f09b18f5beedcb0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.880825] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 824.881519] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 824.893409] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9837777778c84e968f09b18f5beedcb0 [ 824.894016] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 824.895751] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg adda6c68fbe44295b3ad6856b9daefe2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.930842] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg adda6c68fbe44295b3ad6856b9daefe2 [ 824.934153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 7e1ba233b5084238b3bbcedae2edd19f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 824.972804] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e1ba233b5084238b3bbcedae2edd19f [ 824.974082] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 825.002186] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 825.002441] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 825.002603] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.002786] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 825.002932] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 
tempest-AttachVolumeNegativeTest-427402148-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.003104] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 825.003319] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 825.003485] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 825.003655] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 825.003819] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 825.004008] env[61594]: DEBUG nova.virt.hardware [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 825.004900] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8438e64-47a4-43d4-a696-e334254db310 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.014314] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d39534e-cbc8-4f3a-9b32-8f44bef587ca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.151408] env[61594]: DEBUG nova.compute.manager [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Received event network-changed-55011e89-cff6-4abb-9a03-2344a1163928 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 825.151635] env[61594]: DEBUG nova.compute.manager [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Refreshing instance network info cache due to event network-changed-55011e89-cff6-4abb-9a03-2344a1163928. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 825.151947] env[61594]: DEBUG oslo_concurrency.lockutils [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] Acquiring lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.152035] env[61594]: DEBUG oslo_concurrency.lockutils [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] Acquired lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.152195] env[61594]: DEBUG nova.network.neutron [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Refreshing network info cache for port 55011e89-cff6-4abb-9a03-2344a1163928 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 825.152611] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] Expecting reply to msg 9b05d0da585f4e90aa7e479ed61d7440 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 825.163084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b05d0da585f4e90aa7e479ed61d7440 [ 825.172246] env[61594]: DEBUG nova.policy [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06291742abbe4dee8092fba657b8ab91', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b54f5acf42c64133afbb208929492c31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 825.246918] env[61594]: DEBUG nova.network.neutron [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 825.410401] env[61594]: ERROR nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. 
[ 825.410401] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 825.410401] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.410401] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.410401] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.410401] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.410401] env[61594]: ERROR nova.compute.manager raise self.value [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.410401] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 825.410401] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.410401] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 825.410986] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.410986] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 825.410986] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. 
[ 825.410986] env[61594]: ERROR nova.compute.manager [ 825.411366] env[61594]: Traceback (most recent call last): [ 825.411433] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 825.411433] env[61594]: listener.cb(fileno) [ 825.411433] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 825.411433] env[61594]: result = function(*args, **kwargs) [ 825.411433] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 825.411433] env[61594]: return func(*args, **kwargs) [ 825.411433] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 825.411433] env[61594]: raise e [ 825.411433] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 825.411433] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 825.411433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.411433] env[61594]: created_port_ids = self._update_ports_for_instance( [ 825.411433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.411433] env[61594]: with excutils.save_and_reraise_exception(): [ 825.411433] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.411433] env[61594]: self.force_reraise() [ 825.411433] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.411433] env[61594]: raise self.value [ 825.411433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.411433] env[61594]: updated_port = self._update_port( [ 825.411433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.411433] env[61594]: _ensure_no_port_binding_failure(port) [ 825.411433] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.411433] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 825.411433] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. [ 825.411433] env[61594]: Removing descriptor: 20 [ 825.412329] env[61594]: ERROR nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. 
[ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Traceback (most recent call last): [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] yield resources [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.driver.spawn(context, instance, image_meta, [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] vm_ref = self.build_virtual_machine(instance, [ 825.412329] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] for vif in network_info: [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self._sync_wrapper(fn, *args, **kwargs) [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.wait() [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self[:] = self._gt.wait() [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self._exit_event.wait() [ 825.412691] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 825.412691] env[61594]: ERROR 
nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] result = hub.switch() [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self.greenlet.switch() [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] result = function(*args, **kwargs) [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return func(*args, **kwargs) [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise e [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] nwinfo = self.network_api.allocate_for_instance( [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] created_port_ids = self._update_ports_for_instance( [ 825.413121] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] with excutils.save_and_reraise_exception(): [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.force_reraise() [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise self.value [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] updated_port = self._update_port( [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.413524] 
env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] _ensure_no_port_binding_failure(port) [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise exception.PortBindingFailed(port_id=port['id']) [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. [ 825.413524] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] [ 825.414052] env[61594]: INFO nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Terminating instance [ 825.417624] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.101419] env[61594]: DEBUG nova.network.neutron [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.101976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] Expecting reply to msg eab25e7b2b3244e0ad026bd2fd9d3682 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.120545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eab25e7b2b3244e0ad026bd2fd9d3682 [ 826.121849] env[61594]: DEBUG oslo_concurrency.lockutils [req-80fecf0f-7b0f-4635-8a20-7fc33e16ccce req-2bb2d582-d819-4502-818d-b02b02ac2a10 service nova] Releasing lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.122386] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.122697] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 826.123302] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 0635bdcec4804ea6a58e25703c606a3c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.139140] env[61594]: 
INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0635bdcec4804ea6a58e25703c606a3c [ 826.231996] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 826.481124] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "bea7629d-c264-457f-b887-443f1ada1e9c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.481845] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "bea7629d-c264-457f-b887-443f1ada1e9c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.482348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg fdbac9cfcc7e462c8f6240848c395432 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.502498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdbac9cfcc7e462c8f6240848c395432 [ 826.503340] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 826.504914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg df2980ef08154a21bf1a5e58356b50c8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.546583] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "49d32ccb-b1fd-4640-bc6f-8c08f9df0423" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.547069] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "49d32ccb-b1fd-4640-bc6f-8c08f9df0423" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.547553] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 8e11b7ba0a03430e8a208e0f346116a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.561260] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e11b7ba0a03430e8a208e0f346116a2 [ 826.561940] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 826.564417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg f86a5cc64b364c79a19151a714c3bad4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.583569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df2980ef08154a21bf1a5e58356b50c8 [ 826.609479] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.609733] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.611277] env[61594]: INFO nova.compute.claims [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.616891] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e7d91aedebca49a69e0b1dba963fe620 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.618579] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f86a5cc64b364c79a19151a714c3bad4 [ 826.642594] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.662750] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7d91aedebca49a69e0b1dba963fe620 [ 826.664569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg d0bc9d34c58c4830b4ade6fbce2d544f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.679626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d0bc9d34c58c4830b4ade6fbce2d544f [ 826.740595] env[61594]: ERROR nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. 
[ 826.740595] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 826.740595] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 826.740595] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 826.740595] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 826.740595] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 826.740595] env[61594]: ERROR nova.compute.manager raise self.value [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 826.740595] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 826.740595] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 826.740595] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 826.741142] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 826.741142] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 826.741142] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. 
[ 826.741142] env[61594]: ERROR nova.compute.manager [ 826.741142] env[61594]: Traceback (most recent call last): [ 826.741142] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 826.741142] env[61594]: listener.cb(fileno) [ 826.741142] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 826.741142] env[61594]: result = function(*args, **kwargs) [ 826.741142] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 826.741142] env[61594]: return func(*args, **kwargs) [ 826.741142] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 826.741142] env[61594]: raise e [ 826.741142] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 826.741142] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 826.741142] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 826.741142] env[61594]: created_port_ids = self._update_ports_for_instance( [ 826.741142] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 826.741142] env[61594]: with excutils.save_and_reraise_exception(): [ 826.741142] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 826.741142] env[61594]: self.force_reraise() [ 826.741142] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 826.741142] env[61594]: raise self.value [ 826.741142] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 826.741142] env[61594]: updated_port = self._update_port( [ 826.741142] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 826.741142] env[61594]: _ensure_no_port_binding_failure(port) [ 826.741142] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 826.741142] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 826.741950] env[61594]: nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. [ 826.741950] env[61594]: Removing descriptor: 22 [ 826.741950] env[61594]: ERROR nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. 
[ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] Traceback (most recent call last): [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] yield resources [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.driver.spawn(context, instance, image_meta, [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 826.741950] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] vm_ref = self.build_virtual_machine(instance, [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] vif_infos = vmwarevif.get_vif_info(self._session, [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] for vif in network_info: [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self._sync_wrapper(fn, *args, **kwargs) [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.wait() [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self[:] = self._gt.wait() [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self._exit_event.wait() [ 826.742318] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 826.742841] env[61594]: ERROR 
nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] result = hub.switch() [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self.greenlet.switch() [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] result = function(*args, **kwargs) [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return func(*args, **kwargs) [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise e [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] nwinfo = self.network_api.allocate_for_instance( [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 826.742841] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] created_port_ids = self._update_ports_for_instance( [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] with excutils.save_and_reraise_exception(): [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.force_reraise() [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise self.value [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] updated_port = self._update_port( [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 826.743219] 
env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] _ensure_no_port_binding_failure(port) [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 826.743219] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise exception.PortBindingFailed(port_id=port['id']) [ 826.743718] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. [ 826.743718] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] [ 826.743718] env[61594]: INFO nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Terminating instance [ 826.747292] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.747292] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.747292] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 826.747498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 59364f114aa14b6d8e0189720100e346 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.756011] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59364f114aa14b6d8e0189720100e346 [ 826.835480] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 826.848897] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7a3374-6100-4c73-b099-15fbee8f4e21 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.859138] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb88cb3-8fb8-45c3-b23d-eadbbec87658 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.892525] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4983d671-6392-42c9-8de3-5ddab7c89d80 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.899777] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-007b5145-d0ac-490a-821a-fdb55f6ee0e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.914677] env[61594]: DEBUG nova.compute.provider_tree [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 826.915194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 64d612f1cbea4ee29c074a320ed2f35a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.918406] env[61594]: DEBUG nova.compute.manager [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] [instance: bbd46264-8992-4e56-9896-675500fe587b] Received event network-changed-fbd834cc-c82f-429f-889a-1b197f4186b6 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 826.918594] env[61594]: DEBUG nova.compute.manager [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] [instance: bbd46264-8992-4e56-9896-675500fe587b] Refreshing instance network info cache due to event network-changed-fbd834cc-c82f-429f-889a-1b197f4186b6. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 826.918632] env[61594]: DEBUG oslo_concurrency.lockutils [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] Acquiring lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.924302] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64d612f1cbea4ee29c074a320ed2f35a [ 826.925235] env[61594]: DEBUG nova.scheduler.client.report [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 826.928321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e003cfc55bee4b08b86e037050c38020 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.943181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e003cfc55bee4b08b86e037050c38020 [ 826.943739] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.334s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 826.944307] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 826.946510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 7730ab8c0a354279a4befe87dd82f60b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.947405] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.305s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 826.949572] env[61594]: INFO nova.compute.claims [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 826.950700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg cede4ce4ccd04e448ebbdfda2a5d979e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.996931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7730ab8c0a354279a4befe87dd82f60b [ 826.996931] env[61594]: DEBUG nova.compute.utils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 826.997331] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 2faf29b3b0c84d91a7d357c3f8bde7da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 826.997856] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 826.998050] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 827.000965] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cede4ce4ccd04e448ebbdfda2a5d979e [ 827.003156] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 7341646a5b53422ca839ba36585fd140 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.009488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2faf29b3b0c84d91a7d357c3f8bde7da [ 827.010740] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 827.012322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 0b898208cf484813b0297725db639533 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.014128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7341646a5b53422ca839ba36585fd140 [ 827.046282] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Successfully created port: 51ba6a99-29fe-431c-bc59-906ac6340e1a {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.065954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b898208cf484813b0297725db639533 [ 827.068674] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 32ae571639884e16a32a6043c9a0ca91 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.107220] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32ae571639884e16a32a6043c9a0ca91 [ 827.108299] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 827.138522] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.138789] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.138976] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.139534] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.139534] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.139534] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.139719] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.139825] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.139979] env[61594]: DEBUG nova.virt.hardware [None 
req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.140185] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.140363] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.141501] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e07c44-3c39-4f4d-8c23-1f060ab1b871 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.153960] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c2e0b1-7d0f-41f9-9495-41f8a062c338 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.173063] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888819b4-7e0b-4866-a6cf-3012ce0b7e1d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.179865] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6e859e-48de-43db-bebf-6f7403340eb8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.214189] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70be0ca-bd70-406f-96c3-d6a6d3ae2805 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.222073] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.222596] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 1b674b51fb1547fb872bca6587ca4251 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.224366] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a4747e-25ad-4aac-adb4-ae43ba53048f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.238991] env[61594]: DEBUG nova.compute.provider_tree [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: 
f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.239634] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 89c48614708e4859bd1350494b14da8d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.240703] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b674b51fb1547fb872bca6587ca4251 [ 827.241366] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.241741] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 827.241938] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 827.246051] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-56bdf92e-8da1-4c97-a50b-b1260b891403 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.251749] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a516966-3207-48f3-ba82-dfe27476c8eb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.262859] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89c48614708e4859bd1350494b14da8d [ 827.264422] env[61594]: DEBUG nova.scheduler.client.report [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 827.266271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 374302a95e49480eadf0d48809735af6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.274370] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] 
[instance: bbd46264-8992-4e56-9896-675500fe587b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.274370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg acd2c7cf124f42cd97eadc1d4cca0c90 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.279432] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c6c9639-614f-4ffa-a6db-e70ef37b9954 could not be found. [ 827.279660] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.279819] env[61594]: INFO nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Took 0.04 seconds to destroy the instance on the hypervisor. [ 827.280077] env[61594]: DEBUG oslo.service.loopingcall [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.280584] env[61594]: DEBUG nova.compute.manager [-] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 827.280637] env[61594]: DEBUG nova.network.neutron [-] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.283809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 374302a95e49480eadf0d48809735af6 [ 827.284603] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.285078] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 827.287061] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg a262031f038d4e78a6b5b355c709bab3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.288150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg acd2c7cf124f42cd97eadc1d4cca0c90 [ 827.288640] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.289014] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 827.289203] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 827.289664] env[61594]: DEBUG oslo_concurrency.lockutils [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] Acquired lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.289843] env[61594]: DEBUG nova.network.neutron [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] [instance: bbd46264-8992-4e56-9896-675500fe587b] Refreshing network info cache for port fbd834cc-c82f-429f-889a-1b197f4186b6 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 827.290231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] Expecting reply to msg 8cb7bb41fb444f8d8757b741f16dedc1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.291544] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-317c73f2-35fe-47f2-9d8d-627bc53706a6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.301857] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b947efc7-925c-421f-85fb-553e19961760 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.314347] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8cb7bb41fb444f8d8757b741f16dedc1 [ 827.316249] env[61594]: DEBUG nova.policy [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'185a047edb6241d59feb8ad5d2bbc749', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '201633229817424689c8fe5557911ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 827.332401] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a262031f038d4e78a6b5b355c709bab3 [ 827.333135] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bbd46264-8992-4e56-9896-675500fe587b could not be found. [ 827.333368] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 827.333552] env[61594]: INFO nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 827.333821] env[61594]: DEBUG oslo.service.loopingcall [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.334930] env[61594]: DEBUG nova.compute.utils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 827.335568] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg bc41c24207764a72a4d70cf4cc824364 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.336541] env[61594]: DEBUG nova.compute.manager [-] [instance: bbd46264-8992-4e56-9896-675500fe587b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 827.336640] env[61594]: DEBUG nova.network.neutron [-] [instance: bbd46264-8992-4e56-9896-675500fe587b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.338863] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 827.338863] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 827.346254] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc41c24207764a72a4d70cf4cc824364 [ 827.347140] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 827.348768] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 5c526921dbb14be58d349f22ed8350b0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.361684] env[61594]: DEBUG nova.network.neutron [-] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.362129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 17997b10be1a429eb1d31532b051fc1f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.371026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 17997b10be1a429eb1d31532b051fc1f [ 827.371706] env[61594]: DEBUG nova.network.neutron [-] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.372086] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 699415c9083a461294844928a7a5a95d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.393541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c526921dbb14be58d349f22ed8350b0 [ 827.396375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg ed833a5f7df34ff1bf531f335f241e81 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.401019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 699415c9083a461294844928a7a5a95d [ 827.401019] env[61594]: INFO nova.compute.manager [-] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Took 0.12 seconds to deallocate network for instance. 
[ 827.401995] env[61594]: DEBUG nova.compute.claims [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 827.402361] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.402437] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.404732] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg c48e078a3f5c442e991d125086f6a865 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.406826] env[61594]: DEBUG nova.network.neutron [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.427331] env[61594]: DEBUG nova.network.neutron [-] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.427918] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8e49245c03564844bf0641a0ca3ef0d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.442122] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8e49245c03564844bf0641a0ca3ef0d5 [ 827.443237] env[61594]: DEBUG nova.network.neutron [-] [instance: bbd46264-8992-4e56-9896-675500fe587b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.444016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg be8067502bb2463490a5ed76c51d8580 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.449621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed833a5f7df34ff1bf531f335f241e81 [ 827.450571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c48e078a3f5c442e991d125086f6a865 [ 827.452282] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 827.464979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be8067502bb2463490a5ed76c51d8580 [ 827.464979] env[61594]: INFO nova.compute.manager [-] [instance: bbd46264-8992-4e56-9896-675500fe587b] Took 0.13 seconds to deallocate network for instance. [ 827.467183] env[61594]: DEBUG nova.compute.claims [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 827.467277] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 827.501899] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 827.502209] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 827.502813] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 827.502813] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 827.502813] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 827.502813] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 
tempest-MultipleCreateTestJSON-962420962-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 827.503138] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 827.503206] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 827.503377] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 827.503556] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 827.503836] env[61594]: DEBUG nova.virt.hardware [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 827.504813] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e4fbe8a-8a94-4dc8-a30d-484046d29e33 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.517130] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8783d3d6-8e4a-4c8d-bdc2-b0441e6243d9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.627154] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30116a6c-0e33-422d-964b-5c2203008586 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.636022] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa885fc6-53ab-4a57-ba7a-c959334b8c87 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.667308] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920bd9f8-9867-42b2-a38a-8050f7a6ac66 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.676590] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a5830f-b21e-4481-8763-3b6218f0314d {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.684456] env[61594]: DEBUG nova.policy [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '185a047edb6241d59feb8ad5d2bbc749', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '201633229817424689c8fe5557911ae9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 827.698153] env[61594]: DEBUG nova.compute.provider_tree [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.698153] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 3a50f9ca03bc433ca51151a1333e4485 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.714515] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a50f9ca03bc433ca51151a1333e4485 [ 827.714515] env[61594]: DEBUG nova.scheduler.client.report [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 827.716779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 55ae0cf2159448fe94cd8750af7ae035 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.737269] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55ae0cf2159448fe94cd8750af7ae035 [ 827.737998] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.335s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.738697] env[61594]: ERROR nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Traceback (most recent call last): [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.driver.spawn(context, instance, image_meta, [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] vm_ref = self.build_virtual_machine(instance, [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.738697] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] for vif in network_info: [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self._sync_wrapper(fn, *args, **kwargs) [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.wait() [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self[:] = self._gt.wait() [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self._exit_event.wait() [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] result = hub.switch() [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 827.739064] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return self.greenlet.switch() [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] result = function(*args, **kwargs) [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] return func(*args, **kwargs) [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise e [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] nwinfo = self.network_api.allocate_for_instance( [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] created_port_ids = self._update_ports_for_instance( [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] with excutils.save_and_reraise_exception(): [ 827.739462] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] self.force_reraise() [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise self.value [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] updated_port = self._update_port( [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] _ensure_no_port_binding_failure(port) [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 
9c6c9639-614f-4ffa-a6db-e70ef37b9954] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] raise exception.PortBindingFailed(port_id=port['id']) [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] nova.exception.PortBindingFailed: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. [ 827.739829] env[61594]: ERROR nova.compute.manager [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] [ 827.740171] env[61594]: DEBUG nova.compute.utils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 827.742545] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.275s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.744453] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2712b2363d3e45bb96d8ef0d83a262d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.746076] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Build of instance 9c6c9639-614f-4ffa-a6db-e70ef37b9954 was re-scheduled: Binding failed for port 55011e89-cff6-4abb-9a03-2344a1163928, please check neutron logs for more information. 
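The traceback above bottoms out in _ensure_no_port_binding_failure raising nova.exception.PortBindingFailed for port 55011e89-cff6-4abb-9a03-2344a1163928. As a minimal standalone sketch (an illustration, not the actual nova/network/neutron.py source), the check amounts to looking at the binding:vif_type Neutron returned after the port update and raising when it reports a failed binding:

# Illustration only: Neutron marks a port whose binding failed with
# binding:vif_type == 'binding_failed'; the compute manager turns that into
# PortBindingFailed, which is the exception seen in the traceback above.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

port = {'id': '55011e89-cff6-4abb-9a03-2344a1163928',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)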
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 827.746525] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 827.746737] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquiring lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.747150] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Acquired lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.747150] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 827.747451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg ca418a23221c433ca55b08a645fa9998 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 827.756035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca418a23221c433ca55b08a645fa9998 [ 827.790115] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2712b2363d3e45bb96d8ef0d83a262d8 [ 827.840409] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.942669] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c16e508-dbc9-4e6a-860d-d3888e11d737 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.951082] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7ebad3-b5cb-47e4-8426-868240ac782d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.981818] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f5a4f8-bbd2-4a14-9620-85fe70c2bb5d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.989838] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79dea17e-486b-462f-8e7f-4c8a78b6377e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.003840] env[61594]: DEBUG nova.compute.provider_tree [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 828.004374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 85c8694f51e749d6ab6ab092f2e75cc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.014400] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85c8694f51e749d6ab6ab092f2e75cc6 [ 828.015505] env[61594]: DEBUG nova.scheduler.client.report [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 828.018345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cd7aa02ae0b94532abc9dd580a890696 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.030282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd7aa02ae0b94532abc9dd580a890696 [ 828.031158] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.289s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 828.031800] env[61594]: ERROR nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] Traceback (most recent call last): [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.driver.spawn(context, instance, image_meta, [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] vm_ref = self.build_virtual_machine(instance, [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] vif_infos = vmwarevif.get_vif_info(self._session, [ 828.031800] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] for vif in network_info: [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self._sync_wrapper(fn, *args, **kwargs) [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.wait() [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self[:] = self._gt.wait() [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self._exit_event.wait() [ 828.032682] 
env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] result = hub.switch() [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 828.032682] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return self.greenlet.switch() [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] result = function(*args, **kwargs) [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] return func(*args, **kwargs) [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise e [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] nwinfo = self.network_api.allocate_for_instance( [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] created_port_ids = self._update_ports_for_instance( [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] with excutils.save_and_reraise_exception(): [ 828.033308] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] self.force_reraise() [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise self.value [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] updated_port 
= self._update_port( [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] _ensure_no_port_binding_failure(port) [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] raise exception.PortBindingFailed(port_id=port['id']) [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] nova.exception.PortBindingFailed: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. [ 828.033888] env[61594]: ERROR nova.compute.manager [instance: bbd46264-8992-4e56-9896-675500fe587b] [ 828.034400] env[61594]: DEBUG nova.compute.utils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 828.034400] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Build of instance bbd46264-8992-4e56-9896-675500fe587b was re-scheduled: Binding failed for port fbd834cc-c82f-429f-889a-1b197f4186b6, please check neutron logs for more information. 
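The repeated "Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be" records above carry the full inventory dict. For reference, placement derives usable capacity per resource class as (total - reserved) * allocation_ratio; the snippet below is a hypothetical helper applying that formula to the values from the log, not placement or Nova code:

# Hypothetical helper: schedulable capacity from the inventory logged above,
# using capacity = (total - reserved) * allocation_ratio per resource class.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}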
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 828.034643] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 828.034853] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 828.219423] env[61594]: DEBUG nova.network.neutron [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] [instance: bbd46264-8992-4e56-9896-675500fe587b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.219423] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] Expecting reply to msg 8b5fe83ce020438cbf864c90db2ce6f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.229594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b5fe83ce020438cbf864c90db2ce6f4 [ 828.230606] env[61594]: DEBUG oslo_concurrency.lockutils [req-4fdc3611-f8c4-4971-8304-2cb11c91a085 req-44abb85c-cfc9-44dc-bd17-6c5d890763cf service nova] Releasing lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.230984] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.231186] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 828.231641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2e069bdc93f74b49b05c2ca91e32aa71 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.238430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e069bdc93f74b49b05c2ca91e32aa71 [ 828.322696] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.623263] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.623842] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 924f77fd7f7546e3b1933c4478c6e28a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.644128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 924f77fd7f7546e3b1933c4478c6e28a [ 828.644833] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Releasing lock "refresh_cache-9c6c9639-614f-4ffa-a6db-e70ef37b9954" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.645243] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 828.645243] env[61594]: DEBUG nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 828.645399] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 828.758207] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.758796] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 1b98e7a0ec0842bbacafe4eb81efbea6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.772865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1b98e7a0ec0842bbacafe4eb81efbea6 [ 828.774117] env[61594]: DEBUG nova.network.neutron [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.774117] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg a364742c4c4e4b338e97f5daf5cf8cd3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.785223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a364742c4c4e4b338e97f5daf5cf8cd3 [ 828.785852] env[61594]: INFO nova.compute.manager [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] [instance: 9c6c9639-614f-4ffa-a6db-e70ef37b9954] Took 0.14 seconds to deallocate network for instance. [ 828.787524] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 461a68067adf454faac6f6250ba3d671 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.838095] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 461a68067adf454faac6f6250ba3d671 [ 828.841578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg e58c48f0fb00488bb17b8e68045bc053 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.878957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e58c48f0fb00488bb17b8e68045bc053 [ 828.925010] env[61594]: INFO nova.scheduler.client.report [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Deleted allocations for instance 9c6c9639-614f-4ffa-a6db-e70ef37b9954 [ 828.930173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Expecting reply to msg 99a73f3141ff43d9b41cff0ec625ba39 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 828.949686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99a73f3141ff43d9b41cff0ec625ba39 [ 828.950454] env[61594]: DEBUG oslo_concurrency.lockutils [None req-bc4e3c3b-6f00-4137-aa0a-06075495cf98 tempest-ImagesTestJSON-873357144 tempest-ImagesTestJSON-873357144-project-member] Lock "9c6c9639-614f-4ffa-a6db-e70ef37b9954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.073s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.029252] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.029948] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2d0cd138bcba462d8a33d58a970c512c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.045412] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d0cd138bcba462d8a33d58a970c512c [ 829.046211] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-bbd46264-8992-4e56-9896-675500fe587b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 829.046826] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 829.047151] env[61594]: DEBUG nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 829.047377] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 829.166673] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "cb22b0a4-bcef-4964-bd63-5abb2789cedd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.166927] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "cb22b0a4-bcef-4964-bd63-5abb2789cedd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.167417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 
fb5664ae51c64b77b38faaeee8c756a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.175758] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb5664ae51c64b77b38faaeee8c756a5 [ 829.176520] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 829.178263] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f04f1e4c8a7c4e429a3a23ea56934975 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.223088] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f04f1e4c8a7c4e429a3a23ea56934975 [ 829.239872] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 829.240146] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 829.241630] env[61594]: INFO nova.compute.claims [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 829.243619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f3293c7ff38640ca896f3af26e598534 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.280683] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3293c7ff38640ca896f3af26e598534 [ 829.282452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg fa21b2ebc45344809036c21623548d18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.290622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa21b2ebc45344809036c21623548d18 [ 829.306012] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 829.306767] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c204bc395ed84dfd92cff3f657e72c48 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.316948] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c204bc395ed84dfd92cff3f657e72c48 [ 829.316948] env[61594]: DEBUG nova.network.neutron [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.316948] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ebaf44fd1aa74641867880efd17b5d90 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.328244] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebaf44fd1aa74641867880efd17b5d90 [ 829.328843] env[61594]: INFO nova.compute.manager [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: bbd46264-8992-4e56-9896-675500fe587b] Took 0.28 seconds to deallocate network for instance. [ 829.330712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 598b0470aed74e43b1865ecc204bef4f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.367827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 598b0470aed74e43b1865ecc204bef4f [ 829.370619] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 21b820295911442890a0fb071b238093 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.418809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 21b820295911442890a0fb071b238093 [ 829.441627] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4670943-470e-4a3b-a457-e303f7e13add {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.451431] env[61594]: INFO nova.scheduler.client.report [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance bbd46264-8992-4e56-9896-675500fe587b [ 829.460025] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37e5631-a625-4ac4-acf0-05bf7f023e60 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.462929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 
tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg e6c1d8c4d8fc422d8d81a9423a79dab3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.496744] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6c1d8c4d8fc422d8d81a9423a79dab3 [ 829.497626] env[61594]: DEBUG oslo_concurrency.lockutils [None req-e4e8ec04-7676-499a-bfd8-d29b512d5166 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "bbd46264-8992-4e56-9896-675500fe587b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.945s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.498675] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e928031b-26a5-429a-acf0-f59d8973e1bd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.507833] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60522f4b-cc4a-467d-a78b-32271bcbcf5f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.522611] env[61594]: DEBUG nova.compute.provider_tree [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.523137] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 252d42e170e344a799feb95649f25902 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.531640] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 252d42e170e344a799feb95649f25902 [ 829.532561] env[61594]: DEBUG nova.scheduler.client.report [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 829.534814] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 1209b95e7e294e8ba0795a7920c863a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.549280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1209b95e7e294e8ba0795a7920c863a4 [ 829.550076] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.310s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.550703] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 829.552356] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 414446ef90be403c88f4cd3bcb480832 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.589863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 414446ef90be403c88f4cd3bcb480832 [ 829.591561] env[61594]: DEBUG nova.compute.utils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 829.592200] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f1f67d82e61d46aca5124ca858d54563 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.593276] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 829.593480] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 829.616743] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f1f67d82e61d46aca5124ca858d54563 [ 829.618024] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 829.619785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg aba6ac79f7814ade8d31cb8b712de5e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.663135] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aba6ac79f7814ade8d31cb8b712de5e6 [ 829.665845] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ba8f75b4631644f68d00d7e1130d380a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 829.705616] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba8f75b4631644f68d00d7e1130d380a [ 829.706498] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 829.746155] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 829.746424] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 829.746586] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.746766] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 829.747038] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
829.747116] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 829.747354] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 829.747519] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 829.747690] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 829.747857] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 829.748043] env[61594]: DEBUG nova.virt.hardware [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 829.749024] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e111c1e-767e-4d63-b68c-6c3dec2a2b0d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.757861] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac68a679-5c44-465b-8f19-0adf0fde5c26 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.878900] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Successfully created port: 18c8641b-7198-4e85-a2b7-ddf8a76c3639 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.894825] env[61594]: DEBUG nova.policy [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 
'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 830.212019] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Successfully created port: 24404798-6dd9-490d-817a-2a8df00a98ed {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 830.216889] env[61594]: ERROR nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. [ 830.216889] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 830.216889] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.216889] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.216889] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.216889] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.216889] env[61594]: ERROR nova.compute.manager raise self.value [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.216889] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 830.216889] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.216889] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 830.217422] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.217422] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 830.217422] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. 
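
The traceback above ends in nova.network.neutron._ensure_no_port_binding_failure raising PortBindingFailed after Neutron returned the port with a failed binding. The following is a minimal, illustrative sketch of that check, assuming the usual Neutron port dictionary whose 'binding:vif_type' key is set to 'binding_failed' when no mechanism driver could bind the port; only the helper and exception names are taken from the frames in the traceback, the rest is an assumption, not the actual Nova source.

    # Illustrative sketch (not Nova source) of the port-binding check whose
    # failure is logged above. Assumes a Neutron port dict where
    # 'binding:vif_type' == 'binding_failed' marks a failed binding.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Raise if Neutron reports that the port could not be bound.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: a port like the one in the records above triggers the error,
    # which _allocate_network_async re-raises and the build is re-scheduled.
    # ensure_no_port_binding_failure(
    #     {'id': '64a4ae94-4388-4b11-ab1e-f234740d4680',
    #      'binding:vif_type': 'binding_failed'})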
[ 830.217422] env[61594]: ERROR nova.compute.manager [ 830.217875] env[61594]: Traceback (most recent call last): [ 830.219633] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 830.219633] env[61594]: listener.cb(fileno) [ 830.219633] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 830.219633] env[61594]: result = function(*args, **kwargs) [ 830.219633] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.219633] env[61594]: return func(*args, **kwargs) [ 830.219633] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 830.219633] env[61594]: raise e [ 830.219633] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 830.219633] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 830.219633] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.219633] env[61594]: created_port_ids = self._update_ports_for_instance( [ 830.219633] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.219633] env[61594]: with excutils.save_and_reraise_exception(): [ 830.219633] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.219633] env[61594]: self.force_reraise() [ 830.219633] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.219633] env[61594]: raise self.value [ 830.219633] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.219633] env[61594]: updated_port = self._update_port( [ 830.219633] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.219633] env[61594]: _ensure_no_port_binding_failure(port) [ 830.219633] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.219633] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 830.219633] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. [ 830.219633] env[61594]: Removing descriptor: 23 [ 830.220541] env[61594]: ERROR nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. 
[ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Traceback (most recent call last): [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] yield resources [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.driver.spawn(context, instance, image_meta, [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] vm_ref = self.build_virtual_machine(instance, [ 830.220541] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] vif_infos = vmwarevif.get_vif_info(self._session, [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] for vif in network_info: [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return self._sync_wrapper(fn, *args, **kwargs) [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.wait() [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self[:] = self._gt.wait() [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return self._exit_event.wait() [ 830.220946] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 830.220946] env[61594]: ERROR 
nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] result = hub.switch() [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return self.greenlet.switch() [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] result = function(*args, **kwargs) [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return func(*args, **kwargs) [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise e [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] nwinfo = self.network_api.allocate_for_instance( [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] created_port_ids = self._update_ports_for_instance( [ 830.221366] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] with excutils.save_and_reraise_exception(): [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.force_reraise() [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise self.value [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] updated_port = self._update_port( [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 830.221851] 
env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] _ensure_no_port_binding_failure(port) [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise exception.PortBindingFailed(port_id=port['id']) [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. [ 830.221851] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] [ 830.222238] env[61594]: INFO nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Terminating instance [ 830.224012] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.227186] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.227186] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 830.227186] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9eab22651d9245bba96934e5feb9ebfa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.237076] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9eab22651d9245bba96934e5feb9ebfa [ 830.305631] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 830.464484] env[61594]: DEBUG nova.compute.manager [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Received event network-changed-64a4ae94-4388-4b11-ab1e-f234740d4680 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 830.464798] env[61594]: DEBUG nova.compute.manager [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Refreshing instance network info cache due to event network-changed-64a4ae94-4388-4b11-ab1e-f234740d4680. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 830.465435] env[61594]: DEBUG oslo_concurrency.lockutils [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] Acquiring lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 830.507041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquiring lock "ad2881a4-9715-40ed-8489-85a6a575fb30" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.507353] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "ad2881a4-9715-40ed-8489-85a6a575fb30" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.508145] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 644f04fcb2c84e3f95a40cf9246a0540 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.526349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 644f04fcb2c84e3f95a40cf9246a0540 [ 830.527308] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 830.530058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 9cb6d2d00f6a40dfb056114190ee5c23 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.582764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9cb6d2d00f6a40dfb056114190ee5c23 [ 830.604607] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.604988] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.607391] env[61594]: INFO nova.compute.claims [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.609123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg f48d8cc8b46d4b3fb65650ea57d3cf19 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.675041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f48d8cc8b46d4b3fb65650ea57d3cf19 [ 830.676998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 33061648370b4919bf29157e15403eb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.688551] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33061648370b4919bf29157e15403eb4 [ 830.834930] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcf9981-e29f-41fd-ab7b-4d9a5df42df8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.842293] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6d20cf-dd9e-4e54-b608-fb6f6c888488 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.881799] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4cb589-97fb-459f-8dda-ce0e547af4ca {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.887302] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.887874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 48f27c6fa6e54378a0ab91d186dd7e30 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.892253] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34be6ac7-8ef7-4c84-9500-f49551b74737 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.906006] env[61594]: DEBUG nova.compute.provider_tree [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.906495] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 0b5fcd95da0b456e90690a6b630c87a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.907558] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 48f27c6fa6e54378a0ab91d186dd7e30 [ 830.908124] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 830.908569] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 830.908684] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 830.909164] env[61594]: DEBUG oslo_concurrency.lockutils [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] Acquired lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.909340] env[61594]: DEBUG nova.network.neutron [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Refreshing network info cache for port 64a4ae94-4388-4b11-ab1e-f234740d4680 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 830.909724] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] Expecting reply to msg 9b7b715cd093491e918f652ca03b1ae3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.910756] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af4e56f7-b635-4471-8276-d58d4d284d47 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.917228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b5fcd95da0b456e90690a6b630c87a4 [ 830.918387] env[61594]: DEBUG nova.scheduler.client.report [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 830.920352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 78b5fe56e5d24355befe104af3b658f7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.921316] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b7b715cd093491e918f652ca03b1ae3 [ 830.928609] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebd88eb-f9dd-4388-bf3e-5afc2f6cc3c8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.941757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 78b5fe56e5d24355befe104af3b658f7 [ 830.943148] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 
tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.943682] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 830.945344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg cd6b314ef7e04f3d90e4e2c737cb3904 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.958811] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 848010fd-76ed-43d9-8d74-62b09062a2b6 could not be found. [ 830.958811] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 830.958967] env[61594]: INFO nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Took 0.05 seconds to destroy the instance on the hypervisor. [ 830.959374] env[61594]: DEBUG oslo.service.loopingcall [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 830.959545] env[61594]: DEBUG nova.compute.manager [-] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 830.960175] env[61594]: DEBUG nova.network.neutron [-] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 830.993657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd6b314ef7e04f3d90e4e2c737cb3904 [ 830.994935] env[61594]: DEBUG nova.compute.utils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.995745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 81454a65d91f41a0a178837d90202d5f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 830.996756] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 830.997459] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 831.013710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81454a65d91f41a0a178837d90202d5f [ 831.015636] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 831.016136] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg e68fd293c15a41f59b4523807ca94ecc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.054374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e68fd293c15a41f59b4523807ca94ecc [ 831.054374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg c4d362f75b2d48a5847125e8a513a61a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.068058] env[61594]: DEBUG nova.network.neutron [-] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.068593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 620e1540934049f7abedd6b4b0afc490 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.079219] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 620e1540934049f7abedd6b4b0afc490 [ 831.079219] env[61594]: DEBUG nova.network.neutron [-] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.079219] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9e466b6f51a84005ba8aab062def4a35 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.094332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9e466b6f51a84005ba8aab062def4a35 [ 831.094332] env[61594]: INFO nova.compute.manager [-] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Took 0.13 seconds to deallocate network for instance. 
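
The lockutils records throughout this run ("Acquiring lock ... acquired ... waited 0.000s ... released ... held 0.338s") come from oslo.concurrency serializing callers on a named in-process lock and logging how long each caller waited for and then held it. A minimal sketch of that pattern follows, assuming the documented oslo_concurrency.lockutils.synchronized decorator; the 'compute_resources' lock name, instance UUID, and m1.nano sizing mirror the records above, while claim_resources itself is a hypothetical stand-in, not Nova's resource tracker.

    # Minimal sketch of the named-lock pattern behind the lockutils records
    # above (assumed API: oslo_concurrency.lockutils.synchronized, which
    # serializes callers on an in-process semaphore keyed by name).
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, claims):
        # Runs with the 'compute_resources' lock held, so concurrent claims
        # and aborts on this host are serialized, which is what produces the
        # waited/held timings in the log.
        claims[instance_uuid] = {'vcpus': 1, 'memory_mb': 128}

    if __name__ == '__main__':
        claims = {}
        claim_resources('ad2881a4-9715-40ed-8489-85a6a575fb30', claims)
        print(claims)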
[ 831.098185] env[61594]: DEBUG nova.compute.claims [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 831.098715] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.100140] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.103580] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 93ac1eb7ad5f41f7963be234a49f24cc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.109778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4d362f75b2d48a5847125e8a513a61a [ 831.110565] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 831.141323] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 831.141584] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 831.141746] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 831.141949] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 831.142409] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 831.142409] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 831.142707] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 831.142933] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 
tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 831.143251] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 831.143425] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 831.143657] env[61594]: DEBUG nova.virt.hardware [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 831.144833] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf592367-6ce8-432f-8f00-b3008c735588 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.147954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 93ac1eb7ad5f41f7963be234a49f24cc [ 831.157114] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9fb1cf-9c59-4860-8be7-449cf929a051 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.270015] env[61594]: DEBUG nova.network.neutron [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 831.297584] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6623df64-6c35-498c-9d93-f47c169324c9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.304094] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccb66e1-0fbd-4125-b00b-c212eb2fc305 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.336980] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08b6dd84-deab-4df4-8171-e7f36d34e720 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.345311] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9befbc-1f08-48ec-a78f-4140c952bfd5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.364012] env[61594]: DEBUG nova.compute.provider_tree [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.364513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 633bccd86dbd4ab0b5bd0d0cc2cf6ada in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.372832] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 633bccd86dbd4ab0b5bd0d0cc2cf6ada [ 831.374167] env[61594]: DEBUG nova.scheduler.client.report [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 831.376984] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 99635ba795634acd89baca578979f813 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.399123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99635ba795634acd89baca578979f813 [ 831.400184] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 
0.301s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.400841] env[61594]: ERROR nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Traceback (most recent call last): [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.driver.spawn(context, instance, image_meta, [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] vm_ref = self.build_virtual_machine(instance, [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] vif_infos = vmwarevif.get_vif_info(self._session, [ 831.400841] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] for vif in network_info: [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return self._sync_wrapper(fn, *args, **kwargs) [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.wait() [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self[:] = self._gt.wait() [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return 
self._exit_event.wait() [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] result = hub.switch() [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 831.401401] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return self.greenlet.switch() [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] result = function(*args, **kwargs) [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] return func(*args, **kwargs) [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise e [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] nwinfo = self.network_api.allocate_for_instance( [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] created_port_ids = self._update_ports_for_instance( [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] with excutils.save_and_reraise_exception(): [ 831.402035] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] self.force_reraise() [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise self.value [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 
848010fd-76ed-43d9-8d74-62b09062a2b6] updated_port = self._update_port( [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] _ensure_no_port_binding_failure(port) [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] raise exception.PortBindingFailed(port_id=port['id']) [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] nova.exception.PortBindingFailed: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. [ 831.402430] env[61594]: ERROR nova.compute.manager [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] [ 831.402775] env[61594]: DEBUG nova.compute.utils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 831.406129] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Build of instance 848010fd-76ed-43d9-8d74-62b09062a2b6 was re-scheduled: Binding failed for port 64a4ae94-4388-4b11-ab1e-f234740d4680, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 831.406129] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 831.406129] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquiring lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 831.473835] env[61594]: DEBUG nova.policy [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcdf7d6ebc814fc0b579eb2b98ef5a92', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '63bee6818d034676b755bb90af6015a2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 831.564882] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "d364fc2e-89d7-4b2e-a510-19148a8f1a2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.565199] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "d364fc2e-89d7-4b2e-a510-19148a8f1a2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.565714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg b0fe20ef358d47eda0ed521e134493bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.584607] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0fe20ef358d47eda0ed521e134493bb [ 831.585442] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 831.587285] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg f9dd75419dab450aa59be9e215c5ee74 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.662206] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f9dd75419dab450aa59be9e215c5ee74 [ 831.697275] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 831.697275] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 831.698607] env[61594]: INFO nova.compute.claims [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 831.700173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg b88bd9bbdfc14a6c817a0226b37a64d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.748753] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b88bd9bbdfc14a6c817a0226b37a64d5 [ 831.751193] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg f6fce835f4b14b939169a6ebe2409aa6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.761748] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6fce835f4b14b939169a6ebe2409aa6 [ 831.912033] env[61594]: DEBUG nova.network.neutron [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 831.914079] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] Expecting reply to msg 0aadedb079844bbab1af875d07d54b89 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.928065] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0aadedb079844bbab1af875d07d54b89 [ 831.928422] env[61594]: DEBUG oslo_concurrency.lockutils [req-20e493e7-11c5-4332-bacc-f421d6313fb1 req-a9c21473-a687-4ab7-ba66-d95bb8f33665 service nova] Releasing lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 831.928795] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Acquired lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.928982] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 831.929417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9b65848933c2463189e71cf030aed2cb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 831.937400] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b65848933c2463189e71cf030aed2cb [ 831.940969] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5b0db9-4130-45db-a004-b11b424bed9d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.951039] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1b804e-6dec-47e6-8d38-ece97bff48d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.984033] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a88759-bd52-4e79-b778-798f9b5653ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.991618] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed3d0dc-d14a-4b37-8f01-0c5f1b442818 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.005702] env[61594]: DEBUG nova.compute.provider_tree [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 832.006375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg dc7a57767eec435bad42709e9febac0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.023695] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.026387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dc7a57767eec435bad42709e9febac0b [ 832.027840] env[61594]: DEBUG nova.scheduler.client.report [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 832.030086] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg ef78b330e499469294630f0fdf755453 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.044953] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef78b330e499469294630f0fdf755453 [ 832.046174] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.349s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.046284] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 832.047877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c20a62cd3c704d5e82815abd0f010f73 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.088078] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c20a62cd3c704d5e82815abd0f010f73 [ 832.089448] env[61594]: DEBUG nova.compute.utils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.090815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 02d06b53b0fb4adfbcc914f116c849f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.090933] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Not allocating networking since 'none' was specified. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 832.107075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02d06b53b0fb4adfbcc914f116c849f5 [ 832.107726] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 832.109414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c2bd1b6e09ee4c659995c32baadc233f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.142712] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2bd1b6e09ee4c659995c32baadc233f [ 832.146786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c1527d5baaf84493a3ed87b42d7e10ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.178099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c1527d5baaf84493a3ed87b42d7e10ed [ 832.179619] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 832.210703] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.210951] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.211120] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.211302] env[61594]: DEBUG nova.virt.hardware [None 
req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.211471] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.211631] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.211839] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.212327] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 832.212403] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.212574] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.212722] env[61594]: DEBUG nova.virt.hardware [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.213589] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8545b617-ff15-4cca-b541-21aba87ed467 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.223857] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe7563b-b0fb-476e-8c50-66b9f75c8327 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.240638] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.248331] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating folder: Project (a371901b35c94dd6a97437f6d2f0ab57). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.248574] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d053c507-5f51-47f3-b11b-83adfbf04f09 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.259514] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created folder: Project (a371901b35c94dd6a97437f6d2f0ab57) in parent group-v277030. [ 832.259514] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating folder: Instances. Parent ref: group-v277050. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 832.259644] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47b3530a-e647-4723-bbd8-0951045c92bf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.268387] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created folder: Instances in parent group-v277050. [ 832.268387] env[61594]: DEBUG oslo.service.loopingcall [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 832.268525] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 832.268662] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4998b3c3-dd4f-486f-a74f-761560090ac5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.285447] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.285447] env[61594]: value = "task-1291412" [ 832.285447] env[61594]: _type = "Task" [ 832.285447] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.292855] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291412, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.348954] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Successfully created port: 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.796476] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291412, 'name': CreateVM_Task, 'duration_secs': 0.279018} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.796629] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 832.797031] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.797190] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.797504] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 832.797751] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f07a9a5a-48de-4942-a2e2-21028fa101d6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.802310] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 832.802310] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]527f50e2-9f9c-b782-2ede-09f8149caf81" [ 832.802310] env[61594]: _type = "Task" [ 832.802310] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.810382] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]527f50e2-9f9c-b782-2ede-09f8149caf81, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.871707] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.872330] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 924acb402cfe49e69c2d518ecf442d48 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.885787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 924acb402cfe49e69c2d518ecf442d48 [ 832.886714] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Releasing lock "refresh_cache-848010fd-76ed-43d9-8d74-62b09062a2b6" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 832.886714] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 832.887212] env[61594]: DEBUG nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 832.887212] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 832.951117] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.951762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg c4f273b9e6f147458d3b0faf623b6f2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.959645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4f273b9e6f147458d3b0faf623b6f2f [ 832.960939] env[61594]: DEBUG nova.network.neutron [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.961569] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg a20adcf964b24a47b525680a13a429dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 832.970225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a20adcf964b24a47b525680a13a429dd [ 832.970861] env[61594]: INFO nova.compute.manager [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] [instance: 848010fd-76ed-43d9-8d74-62b09062a2b6] Took 0.08 seconds to deallocate network for instance. [ 832.972685] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg e494823555534660a5ffddc4ff02e32f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.012599] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e494823555534660a5ffddc4ff02e32f [ 833.015707] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 4911c66723994dd1949bddfd572ee7b2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.059715] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4911c66723994dd1949bddfd572ee7b2 [ 833.086285] env[61594]: DEBUG nova.compute.manager [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Received event network-changed-87f8c98f-4a27-42fc-b13f-d8e6af9b323a {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 833.086496] env[61594]: DEBUG nova.compute.manager [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Refreshing instance network info cache due to event network-changed-87f8c98f-4a27-42fc-b13f-d8e6af9b323a. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 833.086709] env[61594]: DEBUG oslo_concurrency.lockutils [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] Acquiring lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.086852] env[61594]: DEBUG oslo_concurrency.lockutils [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] Acquired lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.088270] env[61594]: DEBUG nova.network.neutron [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Refreshing network info cache for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 833.090227] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] Expecting reply to msg 57060d8f4a3b438aa534e9e71bce0262 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.097689] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57060d8f4a3b438aa534e9e71bce0262 [ 833.100776] env[61594]: INFO nova.scheduler.client.report [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Deleted allocations for instance 848010fd-76ed-43d9-8d74-62b09062a2b6 [ 833.109802] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Expecting reply to msg 9b37addc470446afa43c3038b8cb1dde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.127113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b37addc470446afa43c3038b8cb1dde [ 833.127113] env[61594]: DEBUG oslo_concurrency.lockutils [None req-21091e0f-affd-42c7-b11a-5c1c6972e824 tempest-AttachInterfacesTestJSON-2048770672 tempest-AttachInterfacesTestJSON-2048770672-project-member] Lock "848010fd-76ed-43d9-8d74-62b09062a2b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.786s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 833.157696] env[61594]: ERROR nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. 
[ 833.157696] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 833.157696] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.157696] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.157696] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.157696] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.157696] env[61594]: ERROR nova.compute.manager raise self.value [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.157696] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 833.157696] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.157696] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 833.158489] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.158489] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 833.158489] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. 
[ 833.158489] env[61594]: ERROR nova.compute.manager [ 833.158489] env[61594]: Traceback (most recent call last): [ 833.158489] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 833.158489] env[61594]: listener.cb(fileno) [ 833.158489] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 833.158489] env[61594]: result = function(*args, **kwargs) [ 833.158489] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 833.158489] env[61594]: return func(*args, **kwargs) [ 833.158489] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 833.158489] env[61594]: raise e [ 833.158489] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 833.158489] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 833.158489] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.158489] env[61594]: created_port_ids = self._update_ports_for_instance( [ 833.158489] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.158489] env[61594]: with excutils.save_and_reraise_exception(): [ 833.158489] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.158489] env[61594]: self.force_reraise() [ 833.158489] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.158489] env[61594]: raise self.value [ 833.158489] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.158489] env[61594]: updated_port = self._update_port( [ 833.158489] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.158489] env[61594]: _ensure_no_port_binding_failure(port) [ 833.158489] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.158489] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 833.159438] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. [ 833.159438] env[61594]: Removing descriptor: 17 [ 833.159438] env[61594]: ERROR nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. 
[ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Traceback (most recent call last): [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] yield resources [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.driver.spawn(context, instance, image_meta, [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 833.159438] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] vm_ref = self.build_virtual_machine(instance, [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] vif_infos = vmwarevif.get_vif_info(self._session, [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] for vif in network_info: [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self._sync_wrapper(fn, *args, **kwargs) [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.wait() [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self[:] = self._gt.wait() [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self._exit_event.wait() [ 833.159823] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 833.160205] env[61594]: ERROR 
nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] result = hub.switch() [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self.greenlet.switch() [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] result = function(*args, **kwargs) [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return func(*args, **kwargs) [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise e [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] nwinfo = self.network_api.allocate_for_instance( [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 833.160205] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] created_port_ids = self._update_ports_for_instance( [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] with excutils.save_and_reraise_exception(): [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.force_reraise() [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise self.value [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] updated_port = self._update_port( [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 833.160640] 
env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] _ensure_no_port_binding_failure(port) [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 833.160640] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise exception.PortBindingFailed(port_id=port['id']) [ 833.161901] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. [ 833.161901] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] [ 833.161901] env[61594]: INFO nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Terminating instance [ 833.162625] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquiring lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.165265] env[61594]: DEBUG nova.network.neutron [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.319376] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.319671] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.319946] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 833.488365] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Successfully created port: 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.647617] env[61594]: DEBUG nova.network.neutron [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 
req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.647617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] Expecting reply to msg 50364cff5e544c8c9ea1d2dcb894f3f6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.666387] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50364cff5e544c8c9ea1d2dcb894f3f6 [ 833.668340] env[61594]: DEBUG oslo_concurrency.lockutils [req-5fd38f2b-5963-4a30-bac2-6b1720908d38 req-796f8d80-79d3-4461-95d9-09e2b9e8c728 service nova] Releasing lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.668340] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquired lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.668340] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.668794] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg cb32485900144225a22b00ce1ab3e01d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.694804] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb32485900144225a22b00ce1ab3e01d [ 833.806857] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "dc31ebf5-889b-438b-9f54-6df807714a38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.806857] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "dc31ebf5-889b-438b-9f54-6df807714a38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.806857] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg eecfe6e6a6df4cb08fbd5037497f2444 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.820354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
eecfe6e6a6df4cb08fbd5037497f2444 [ 833.823011] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 833.824862] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg d73134c7d1e14508935d5fef7af0a9a3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.832882] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.864372] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d73134c7d1e14508935d5fef7af0a9a3 [ 833.900322] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 833.900740] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 833.904292] env[61594]: INFO nova.compute.claims [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 833.905024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 23dac1fe3c134426949b9653b48b88c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.957811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23dac1fe3c134426949b9653b48b88c0 [ 833.959680] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c30b98003e6b46ab9a9e8dd896177312 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 833.970103] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c30b98003e6b46ab9a9e8dd896177312 [ 834.130645] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8376fe3-c6be-4549-a4a8-f79245d9458c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.137701] env[61594]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5e3584-b760-4e9c-b954-905f41fe2c6f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.169176] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65eb90c3-1060-48ff-9fdf-e0677a46078e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.176585] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1e489d6-b141-4ab6-9859-7bcfd36e4182 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.191249] env[61594]: DEBUG nova.compute.provider_tree [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 834.191780] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 7d087e843de748aab9e8b4b717b34704 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.200336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d087e843de748aab9e8b4b717b34704 [ 834.201345] env[61594]: DEBUG nova.scheduler.client.report [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 834.204328] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 7902c955fe654e52a93e1bb32507f26a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.219081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7902c955fe654e52a93e1bb32507f26a [ 834.219825] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.319s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.220307] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 834.221981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg e5e475c792d24d639f0d37761b084dc2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.255856] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e5e475c792d24d639f0d37761b084dc2 [ 834.257564] env[61594]: DEBUG nova.compute.utils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 834.258181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg ae6c05113d1040f4ac3a45399ccb5d52 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.259064] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 834.274181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae6c05113d1040f4ac3a45399ccb5d52 [ 834.274434] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 834.276194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 5b2120a1916c48a1897d90a023763169 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.309972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b2120a1916c48a1897d90a023763169 [ 834.314056] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c4dac7737cc846d6aa360dd754132b7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.358141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4dac7737cc846d6aa360dd754132b7c [ 834.359427] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 834.390096] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.390442] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.390625] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.390846] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.391133] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.391462] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.391699] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.391870] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.392080] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 
tempest-ServerShowV247Test-65457029-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.392263] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.392459] env[61594]: DEBUG nova.virt.hardware [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.393393] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2412fa7d-72e0-4cea-9d43-11203130a628 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.403821] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ffa923-a87c-4a2f-a379-870c8f63fa19 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.423855] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.431283] env[61594]: DEBUG oslo.service.loopingcall [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.432285] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 834.432637] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1ba1361-ecbb-49bc-945b-d762fc0111b7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.451835] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.451835] env[61594]: value = "task-1291413" [ 834.451835] env[61594]: _type = "Task" [ 834.451835] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.462815] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291413, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.709627] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.710193] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg b7cce690a76543258ded3b01a575ad1f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.720863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b7cce690a76543258ded3b01a575ad1f [ 834.721893] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Releasing lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.722423] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 834.722620] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 834.723186] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d7cd219-38ec-40f3-a447-971a42f2270f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.732301] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab4d7e26-4206-4e0a-a6d0-d3f3b997acfd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.758253] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7cacf5b7-500d-493c-a86a-2a8a03cc6eac could not be found. 
[ 834.758627] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 834.758887] env[61594]: INFO nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Took 0.04 seconds to destroy the instance on the hypervisor. [ 834.759361] env[61594]: DEBUG oslo.service.loopingcall [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.760016] env[61594]: DEBUG nova.compute.manager [-] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 834.760224] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.840624] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.844021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3b199ec8fb574dd6a5cf33ffcf2b111d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.854621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b199ec8fb574dd6a5cf33ffcf2b111d [ 834.855384] env[61594]: DEBUG nova.network.neutron [-] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.856039] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7d599dda20a7401498a971b0931aa25d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.869081] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d599dda20a7401498a971b0931aa25d [ 834.870168] env[61594]: INFO nova.compute.manager [-] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Took 0.11 seconds to deallocate network for instance. 
[ 834.875092] env[61594]: DEBUG nova.compute.claims [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 834.875365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 834.875647] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.877745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg d875aadeb1814ff0bea76170f2ba088e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 834.917346] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d875aadeb1814ff0bea76170f2ba088e [ 834.974042] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291413, 'name': CreateVM_Task} progress is 25%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.028783] env[61594]: ERROR nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. 
[ 835.028783] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 835.028783] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.028783] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.028783] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.028783] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.028783] env[61594]: ERROR nova.compute.manager raise self.value [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.028783] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 835.028783] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.028783] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 835.029823] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.029823] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 835.029823] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. 
[ 835.029823] env[61594]: ERROR nova.compute.manager [ 835.029823] env[61594]: Traceback (most recent call last): [ 835.029823] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 835.029823] env[61594]: listener.cb(fileno) [ 835.029823] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 835.029823] env[61594]: result = function(*args, **kwargs) [ 835.029823] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 835.029823] env[61594]: return func(*args, **kwargs) [ 835.029823] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 835.029823] env[61594]: raise e [ 835.029823] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 835.029823] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 835.029823] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.029823] env[61594]: created_port_ids = self._update_ports_for_instance( [ 835.029823] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.029823] env[61594]: with excutils.save_and_reraise_exception(): [ 835.029823] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.029823] env[61594]: self.force_reraise() [ 835.029823] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.029823] env[61594]: raise self.value [ 835.029823] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.029823] env[61594]: updated_port = self._update_port( [ 835.029823] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.029823] env[61594]: _ensure_no_port_binding_failure(port) [ 835.029823] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.029823] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 835.030811] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. [ 835.030811] env[61594]: Removing descriptor: 24 [ 835.030811] env[61594]: ERROR nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. 
[ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Traceback (most recent call last): [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] yield resources [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.driver.spawn(context, instance, image_meta, [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self._vmops.spawn(context, instance, image_meta, injected_files, [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 835.030811] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] vm_ref = self.build_virtual_machine(instance, [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] vif_infos = vmwarevif.get_vif_info(self._session, [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] for vif in network_info: [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self._sync_wrapper(fn, *args, **kwargs) [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.wait() [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self[:] = self._gt.wait() [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self._exit_event.wait() [ 835.031193] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 835.031577] env[61594]: ERROR 
nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] result = hub.switch() [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self.greenlet.switch() [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] result = function(*args, **kwargs) [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return func(*args, **kwargs) [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise e [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] nwinfo = self.network_api.allocate_for_instance( [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.031577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] created_port_ids = self._update_ports_for_instance( [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] with excutils.save_and_reraise_exception(): [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.force_reraise() [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise self.value [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] updated_port = self._update_port( [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.032857] 
env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] _ensure_no_port_binding_failure(port) [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.032857] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise exception.PortBindingFailed(port_id=port['id']) [ 835.033577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. [ 835.033577] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] [ 835.033577] env[61594]: INFO nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Terminating instance [ 835.035108] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.035108] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.035108] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 835.035108] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 1e60a88e187e47538ac00d050167c00f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.044589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e60a88e187e47538ac00d050167c00f [ 835.089442] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4794c1-ba1b-4335-8796-0851109b5f9e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.100266] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4946df8a-a9ef-4488-8814-b21aa71b58fd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.133049] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.135692] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a909d84-8fca-44e4-afc8-2b31b5f892f6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.142991] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b07438-32cb-4ffa-84d2-31f1de071931 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.160290] env[61594]: DEBUG nova.compute.provider_tree [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.160916] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 95be78ab832a459a84c34f18d18aac5b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.169998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95be78ab832a459a84c34f18d18aac5b [ 835.171156] env[61594]: DEBUG nova.scheduler.client.report [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 835.173622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 94efa599db214412b23d5231a0518276 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.192011] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94efa599db214412b23d5231a0518276 [ 835.192011] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.316s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 835.192366] env[61594]: ERROR nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Traceback (most recent call last): [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.driver.spawn(context, instance, image_meta, [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self._vmops.spawn(context, instance, image_meta, injected_files, [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] vm_ref = self.build_virtual_machine(instance, [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] vif_infos = vmwarevif.get_vif_info(self._session, [ 835.192366] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] for vif in network_info: [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self._sync_wrapper(fn, *args, **kwargs) [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.wait() [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self[:] = self._gt.wait() [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self._exit_event.wait() [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] result = hub.switch() [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 835.192940] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return self.greenlet.switch() [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] result = function(*args, **kwargs) [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] return func(*args, **kwargs) [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise e [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] nwinfo = self.network_api.allocate_for_instance( [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] created_port_ids = self._update_ports_for_instance( [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] with excutils.save_and_reraise_exception(): [ 835.193501] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] self.force_reraise() [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise self.value [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] updated_port = self._update_port( [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] _ensure_no_port_binding_failure(port) [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 
7cacf5b7-500d-493c-a86a-2a8a03cc6eac] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] raise exception.PortBindingFailed(port_id=port['id']) [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] nova.exception.PortBindingFailed: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. [ 835.194385] env[61594]: ERROR nova.compute.manager [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] [ 835.194867] env[61594]: DEBUG nova.compute.utils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 835.194867] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Build of instance 7cacf5b7-500d-493c-a86a-2a8a03cc6eac was re-scheduled: Binding failed for port 87f8c98f-4a27-42fc-b13f-d8e6af9b323a, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 835.194986] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 835.195451] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquiring lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 835.195451] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Acquired lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.195598] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 835.196239] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg edd11c29159e48168d8503695087e1b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.205813] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg edd11c29159e48168d8503695087e1b3 [ 835.270896] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.471257] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291413, 'name': CreateVM_Task} progress is 25%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.701922] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "1c342a36-b05d-452e-bbe1-fedf93e9f9d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.702178] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "1c342a36-b05d-452e-bbe1-fedf93e9f9d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.702661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg b1270c16178f460c89bc3d94740e21dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.713141] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1270c16178f460c89bc3d94740e21dd [ 835.713683] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 835.715530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 2823481a14364326b7a0764a566c9d56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.759637] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2823481a14364326b7a0764a566c9d56 [ 835.783066] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.788090] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 835.788090] env[61594]: INFO nova.compute.claims [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 835.788090] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 7fedfce0da9c4195b512f32ca5b1f829 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.831424] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.831424] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 117bb8eda98746dc8555bb1fe5018a73 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.837481] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7fedfce0da9c4195b512f32ca5b1f829 [ 835.844399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 8af5024833004931a2347795d229e0df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.849595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 117bb8eda98746dc8555bb1fe5018a73 [ 835.849595] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" 
{{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.849595] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 835.849595] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 835.849595] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1dc71bb3-607a-4cd8-ba71-f30a72722ce4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.853763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8af5024833004931a2347795d229e0df [ 835.863208] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4079631-1fa8-42c1-8fe0-89a5248c0416 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.888398] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 382b96fa-d807-4f96-b47e-784dfcd26437 could not be found. [ 835.888398] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 835.888398] env[61594]: INFO nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Took 0.04 seconds to destroy the instance on the hypervisor. [ 835.888785] env[61594]: DEBUG oslo.service.loopingcall [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 835.891289] env[61594]: DEBUG nova.compute.manager [-] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 835.891432] env[61594]: DEBUG nova.network.neutron [-] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.967406] env[61594]: DEBUG nova.network.neutron [-] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.967470] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7de078b66b8f4ec0bdfb71b974a57ab7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.979100] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291413, 'name': CreateVM_Task} progress is 25%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.980103] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.980645] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 0930abb7a8b1400cb9c27bec40c09563 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.982430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7de078b66b8f4ec0bdfb71b974a57ab7 [ 835.986021] env[61594]: DEBUG nova.network.neutron [-] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.986021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 8f315651bf9048b29d5a7c2299520c22 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 835.992766] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f315651bf9048b29d5a7c2299520c22 [ 835.993497] env[61594]: INFO nova.compute.manager [-] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Took 0.10 seconds to deallocate network for instance. 
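The WARNING and DEBUG records just above show the VMware destroy path tolerating an instance that no longer exists on the backend: InstanceNotFound is downgraded to a warning and cleanup (network deallocation, claim abort) still runs. Below is a rough, self-contained sketch of that tolerant-destroy pattern, not Nova's actual vmops code; the backend lookup is a stub standing in for the real vSphere SearchIndex/PropertyCollector calls.

```python
# Illustrative sketch only: a destroy path that treats a VM which is already
# missing on the backend as successfully destroyed, mirroring the WARNING above.
import logging

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)


class InstanceNotFound(Exception):
    """Raised when the VM cannot be located on the hypervisor."""


def find_vm_ref(instance_uuid):
    # Stub standing in for the real backend lookup (hypothetical helper).
    raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def destroy_instance(instance_uuid):
    try:
        vm_ref = find_vm_ref(instance_uuid)
        LOG.debug("Destroying VM %s", vm_ref)
    except InstanceNotFound as exc:
        # Warn and keep going, so network deallocation and claim abort
        # still run for the missing instance, as in the log above.
        LOG.warning("Instance does not exist on backend: %s", exc)
    LOG.debug("Instance destroyed")


destroy_instance("382b96fa-d807-4f96-b47e-784dfcd26437")
```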
[ 835.995898] env[61594]: DEBUG nova.compute.claims [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 835.996093] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.996563] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0930abb7a8b1400cb9c27bec40c09563 [ 835.998652] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Releasing lock "refresh_cache-7cacf5b7-500d-493c-a86a-2a8a03cc6eac" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.998652] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 835.998652] env[61594]: DEBUG nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 835.998652] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 836.055186] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7822ff-1f42-4308-b3ca-7046307f5863 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.062890] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f48aebf-65d4-48e6-87d1-8958c87d79ba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.102306] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 836.103040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 338a1e13eaef4ed59cb1851ec9f1b41e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.104399] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b084164c-75eb-4e5d-9f43-f8a7b28b00ef {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.112432] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b71ff0-ecf4-4cdd-8424-311221733d27 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.116782] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 338a1e13eaef4ed59cb1851ec9f1b41e [ 836.118035] env[61594]: DEBUG nova.network.neutron [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.121023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 858b57ae6e2f454fbf8e4bc8d074a7e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.129725] env[61594]: DEBUG nova.compute.provider_tree [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.130242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 3ffe976e260241cb83e8d8d6e605dee2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.131831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 858b57ae6e2f454fbf8e4bc8d074a7e4 [ 836.132874] env[61594]: INFO nova.compute.manager [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] [instance: 7cacf5b7-500d-493c-a86a-2a8a03cc6eac] Took 0.13 seconds to deallocate network for instance. 
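The Acquiring/Acquired/Releasing records around the network-info cache rebuild above follow oslo.concurrency's named-lock pattern, with one lock per instance ("refresh_cache-<instance uuid>"). A minimal usage sketch, assuming oslo.concurrency is installed; the rebuild callback is a stand-in, not the real neutronv2 cache code.

```python
# Sketch of guarding a per-instance cache refresh with an oslo.concurrency
# named lock, matching the "refresh_cache-<uuid>" lock names in the log.
from oslo_concurrency import lockutils


def refresh_instance_nw_cache(instance_uuid, rebuild_cache):
    with lockutils.lock(f"refresh_cache-{instance_uuid}"):
        # Only one worker rebuilds this instance's network info at a time.
        return rebuild_cache(instance_uuid)


if __name__ == "__main__":
    nw_info = refresh_instance_nw_cache(
        "7cacf5b7-500d-493c-a86a-2a8a03cc6eac",
        rebuild_cache=lambda uuid: [],  # the log shows an empty network_info list
    )
    print(nw_info)
```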
[ 836.134042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 924b0210c5464f66a74bc28a672b7df2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.141420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3ffe976e260241cb83e8d8d6e605dee2 [ 836.142664] env[61594]: DEBUG nova.scheduler.client.report [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 836.146213] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 0a41118856fc4ca8984281c0c450c0ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.169621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a41118856fc4ca8984281c0c450c0ac [ 836.170658] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.387s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.171196] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 836.173013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg f13ef75acf8a4fa89d94a023f983e9a3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.178026] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.178s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 836.178026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg a6a979b70cea4398aa78686550bac625 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.178026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 924b0210c5464f66a74bc28a672b7df2 [ 836.180386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg ae061e53919c4ae6875d57132aa6809a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.225025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a6a979b70cea4398aa78686550bac625 [ 836.229103] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae061e53919c4ae6875d57132aa6809a [ 836.233026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f13ef75acf8a4fa89d94a023f983e9a3 [ 836.233972] env[61594]: DEBUG nova.compute.utils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 836.234633] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg a4c75bb426cc4c45a6965d11680ecc25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.235710] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 836.236103] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 836.249019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4c75bb426cc4c45a6965d11680ecc25 [ 836.249019] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 836.249918] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 5c16afab73994c1480c8305b44d8b9e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.264710] env[61594]: INFO nova.scheduler.client.report [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Deleted allocations for instance 7cacf5b7-500d-493c-a86a-2a8a03cc6eac [ 836.271084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Expecting reply to msg 730c1d133cdb4fa9a4373be11359bd23 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.293535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c16afab73994c1480c8305b44d8b9e0 [ 836.296875] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg fd314e1126d34cffac95a86e7a820d8e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.299133] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 730c1d133cdb4fa9a4373be11359bd23 [ 836.299693] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4408f30b-b452-4784-99a2-f2e10239d43e tempest-InstanceActionsNegativeTestJSON-168004218 tempest-InstanceActionsNegativeTestJSON-168004218-project-member] Lock "7cacf5b7-500d-493c-a86a-2a8a03cc6eac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.860s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.340515] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fd314e1126d34cffac95a86e7a820d8e [ 836.341821] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 836.379658] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 836.379990] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 836.380360] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 836.380670] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 836.380887] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 836.381121] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 836.381903] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 836.382196] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 836.382452] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d 
tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 836.382650] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 836.382881] env[61594]: DEBUG nova.virt.hardware [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 836.383811] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20834e30-9811-4c05-b134-1f40aea8ac7b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.396386] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c694f1c-d602-4633-95dd-4b9a95feab1f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.428453] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d33178-6efd-4d1b-bdfe-fb55f85ae39e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.436212] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f725c9d-a302-493b-a012-48dc622e42aa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.473685] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551ac8d9-9229-4be8-94e2-8e8836f91da9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.482133] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291413, 'name': CreateVM_Task, 'duration_secs': 1.846919} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.484269] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 836.484800] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.484968] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.485316] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 836.486555] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a9f4dd-0162-4b82-959d-a0180ae69162 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.490525] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45082bc3-c34f-4bdb-b4c7-b89ae8a1962b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.496205] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 836.496205] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]529d5edd-c59d-dd99-d3ed-12caa9f83e97" [ 836.496205] env[61594]: _type = "Task" [ 836.496205] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.504353] env[61594]: DEBUG nova.compute.provider_tree [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 836.504855] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg bf6c4ec6de8d4b149097b96c4a1ebedd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.517040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.517040] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.517040] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.517040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bf6c4ec6de8d4b149097b96c4a1ebedd [ 836.518667] env[61594]: DEBUG nova.scheduler.client.report [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 836.521676] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg c17b4aa4764d40fe89565fb0933c88b4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.555649] env[61594]: DEBUG nova.policy [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bcf95f76d0f54cbebde1301f62b44bcd', 
'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7300699b4cb49de89d096e3a1ac1778', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 836.560077] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c17b4aa4764d40fe89565fb0933c88b4 [ 836.561048] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.387s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.561670] env[61594]: ERROR nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Traceback (most recent call last): [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.driver.spawn(context, instance, image_meta, [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self._vmops.spawn(context, instance, image_meta, injected_files, [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] vm_ref = self.build_virtual_machine(instance, [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] vif_infos = vmwarevif.get_vif_info(self._session, [ 836.561670] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] for vif in network_info: [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self._sync_wrapper(fn, *args, **kwargs) [ 836.562051] env[61594]: ERROR 
nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.wait() [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self[:] = self._gt.wait() [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self._exit_event.wait() [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] result = hub.switch() [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 836.562051] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return self.greenlet.switch() [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] result = function(*args, **kwargs) [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] return func(*args, **kwargs) [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise e [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] nwinfo = self.network_api.allocate_for_instance( [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] created_port_ids = self._update_ports_for_instance( [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 836.562438] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] with excutils.save_and_reraise_exception(): [ 836.562438] env[61594]: ERROR nova.compute.manager 
[instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] self.force_reraise() [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise self.value [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] updated_port = self._update_port( [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] _ensure_no_port_binding_failure(port) [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] raise exception.PortBindingFailed(port_id=port['id']) [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] nova.exception.PortBindingFailed: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. [ 836.562832] env[61594]: ERROR nova.compute.manager [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] [ 836.563163] env[61594]: DEBUG nova.compute.utils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 836.564062] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Build of instance 382b96fa-d807-4f96-b47e-784dfcd26437 was re-scheduled: Binding failed for port 51ba6a99-29fe-431c-bc59-906ac6340e1a, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 836.564366] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 836.564591] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquiring lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 836.564785] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Acquired lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.564910] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 836.565481] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 950dcaeed04b4aca9bfeb68378462774 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 836.576702] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 950dcaeed04b4aca9bfeb68378462774 [ 836.648026] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.356664] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. 
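The sequence above (PortBindingFailed raised during spawn, the resource claim aborted under the compute_resources lock, the build marked re-scheduled, VIFs unplugged and networks deallocated) is the compute manager treating a failed port binding as a retryable build failure rather than a terminal error. A heavily condensed, hypothetical sketch of that control flow follows; every name is illustrative, and the real logic lives in nova.compute.manager around _do_build_and_run_instance.

```python
# Condensed, illustrative control flow: clean up locally, then hand the
# request back to the scheduler instead of failing the instance outright.
class PortBindingFailed(Exception):
    pass


def build_and_run_instance(instance, build, abort_claim,
                           deallocate_network, reschedule):
    try:
        build(instance)
    except PortBindingFailed as exc:
        # Mirror the log: abort the claim, drop any allocated networking,
        # then re-schedule the build with the failure reason attached.
        abort_claim(instance)
        deallocate_network(instance)
        reschedule(instance, reason=str(exc))
```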
[ 837.356664] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.356664] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.356664] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.356664] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.356664] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.356664] env[61594]: ERROR nova.compute.manager raise self.value [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.356664] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 837.356664] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.356664] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 837.359464] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.359464] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 837.359464] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. 
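The "Instance failed network setup after 1 attempt(s)" record and the "raise e" frame in the traceback above suggest a retry loop around network allocation that re-raises the final failure to the spawning greenthread. A hedged sketch of that shape; the attempt count, parameter names, and broad exception handling are assumptions, not Nova's actual configuration.

```python
# Sketch: attempt network allocation a fixed number of times, log each
# failure, and re-raise the last exception if no attempt succeeds.
import logging

LOG = logging.getLogger(__name__)


def allocate_network_with_retries(allocate, instance_uuid, attempts=1):
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            return allocate(instance_uuid)
        except Exception as exc:  # Nova catches narrower exceptions than this
            last_exc = exc
            LOG.error("Instance failed network setup after %d attempt(s): %s",
                      attempt, exc)
    raise last_exc
```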
[ 837.359464] env[61594]: ERROR nova.compute.manager [ 837.359464] env[61594]: Traceback (most recent call last): [ 837.359464] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 837.359464] env[61594]: listener.cb(fileno) [ 837.359464] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 837.359464] env[61594]: result = function(*args, **kwargs) [ 837.359464] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.359464] env[61594]: return func(*args, **kwargs) [ 837.359464] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 837.359464] env[61594]: raise e [ 837.359464] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.359464] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 837.359464] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.359464] env[61594]: created_port_ids = self._update_ports_for_instance( [ 837.359464] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.359464] env[61594]: with excutils.save_and_reraise_exception(): [ 837.359464] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.359464] env[61594]: self.force_reraise() [ 837.359464] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.359464] env[61594]: raise self.value [ 837.359464] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.359464] env[61594]: updated_port = self._update_port( [ 837.359464] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.359464] env[61594]: _ensure_no_port_binding_failure(port) [ 837.359464] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.359464] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 837.360851] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. [ 837.360851] env[61594]: Removing descriptor: 25 [ 837.360851] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. 
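Every traceback in this stretch bottoms out in nova/network/neutron.py's _ensure_no_port_binding_failure. Based only on the frames above and the shape of the Neutron port API, the check plausibly inspects the updated port's binding:vif_type and rejects a failed binding; the constant name and dict shape below are assumptions, and the sketch is not Nova's verbatim code.

```python
# Sketch of a post-update port check: a port whose binding ended up in the
# "failed" state is rejected with PortBindingFailed, as seen in the traceback.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed constant name


def ensure_no_port_binding_failure(port):
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])
```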
[ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Traceback (most recent call last): [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] yield resources [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.driver.spawn(context, instance, image_meta, [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.360851] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] vm_ref = self.build_virtual_machine(instance, [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] for vif in network_info: [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self._sync_wrapper(fn, *args, **kwargs) [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.wait() [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self[:] = self._gt.wait() [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self._exit_event.wait() [ 837.361445] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 837.362197] env[61594]: ERROR 
nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] result = hub.switch() [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self.greenlet.switch() [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] result = function(*args, **kwargs) [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return func(*args, **kwargs) [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise e [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] nwinfo = self.network_api.allocate_for_instance( [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.362197] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] created_port_ids = self._update_ports_for_instance( [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] with excutils.save_and_reraise_exception(): [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.force_reraise() [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise self.value [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] updated_port = self._update_port( [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.363347] 
env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] _ensure_no_port_binding_failure(port) [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.363347] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise exception.PortBindingFailed(port_id=port['id']) [ 837.363805] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. [ 837.363805] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] [ 837.363805] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Terminating instance [ 837.363805] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.363805] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.363805] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.364249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 308aadb1165c4dfbb954d3c90d384c74 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.369331] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. 
[ 837.369331] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.369331] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.369331] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.369331] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.369331] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.369331] env[61594]: ERROR nova.compute.manager raise self.value [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.369331] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 837.369331] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.369331] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 837.372025] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.372025] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 837.372025] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. 
[ 837.372025] env[61594]: ERROR nova.compute.manager [ 837.372025] env[61594]: Traceback (most recent call last): [ 837.372025] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 837.372025] env[61594]: listener.cb(fileno) [ 837.372025] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 837.372025] env[61594]: result = function(*args, **kwargs) [ 837.372025] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.372025] env[61594]: return func(*args, **kwargs) [ 837.372025] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 837.372025] env[61594]: raise e [ 837.372025] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.372025] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 837.372025] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.372025] env[61594]: created_port_ids = self._update_ports_for_instance( [ 837.372025] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.372025] env[61594]: with excutils.save_and_reraise_exception(): [ 837.372025] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.372025] env[61594]: self.force_reraise() [ 837.372025] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.372025] env[61594]: raise self.value [ 837.372025] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.372025] env[61594]: updated_port = self._update_port( [ 837.372025] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.372025] env[61594]: _ensure_no_port_binding_failure(port) [ 837.372025] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.372025] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 837.372886] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. [ 837.372886] env[61594]: Removing descriptor: 20 [ 837.372886] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. 
[ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Traceback (most recent call last): [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] yield resources [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.driver.spawn(context, instance, image_meta, [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 837.372886] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] vm_ref = self.build_virtual_machine(instance, [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] vif_infos = vmwarevif.get_vif_info(self._session, [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] for vif in network_info: [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self._sync_wrapper(fn, *args, **kwargs) [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.wait() [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self[:] = self._gt.wait() [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self._exit_event.wait() [ 837.373265] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 837.373768] env[61594]: ERROR 
nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] result = hub.switch() [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self.greenlet.switch() [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] result = function(*args, **kwargs) [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return func(*args, **kwargs) [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise e [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] nwinfo = self.network_api.allocate_for_instance( [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 837.373768] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] created_port_ids = self._update_ports_for_instance( [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] with excutils.save_and_reraise_exception(): [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.force_reraise() [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise self.value [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] updated_port = self._update_port( [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 837.374195] 
env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] _ensure_no_port_binding_failure(port) [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 837.374195] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise exception.PortBindingFailed(port_id=port['id']) [ 837.374552] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. [ 837.374552] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] [ 837.374552] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Terminating instance [ 837.375644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 308aadb1165c4dfbb954d3c90d384c74 [ 837.377864] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 837.379093] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.379093] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 837.380536] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 06c2310a7e4842a5888da42d5d397275 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.397582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06c2310a7e4842a5888da42d5d397275 [ 837.410274] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.411653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 0ab35e4f9fa149269ea48dfbaecb0319 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.434410] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0ab35e4f9fa149269ea48dfbaecb0319 [ 837.435357] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Releasing lock "refresh_cache-382b96fa-d807-4f96-b47e-784dfcd26437" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 837.435742] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 837.435784] env[61594]: DEBUG nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 837.435961] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 837.450191] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.506714] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.509118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 64fc265086e249a58036fa3eab991097 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.519932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64fc265086e249a58036fa3eab991097 [ 837.523022] env[61594]: DEBUG nova.network.neutron [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.523022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 8beae94f51a64611ad6639a3ac9fc13f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.533013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8beae94f51a64611ad6639a3ac9fc13f [ 837.534247] env[61594]: INFO nova.compute.manager [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] [instance: 382b96fa-d807-4f96-b47e-784dfcd26437] Took 0.10 seconds to deallocate network for instance. [ 837.537049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 369dd24eb2d54a4d8004a8e698e34f6b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.625426] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 369dd24eb2d54a4d8004a8e698e34f6b [ 837.630786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg db6e51c35e3c458994323dd1b276bc0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.673116] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db6e51c35e3c458994323dd1b276bc0b [ 837.703500] env[61594]: INFO nova.scheduler.client.report [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Deleted allocations for instance 382b96fa-d807-4f96-b47e-784dfcd26437 [ 837.710226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Expecting reply to msg 7d54735b90074c98855e6e3c9b036ae9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.716832] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 837.725831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d54735b90074c98855e6e3c9b036ae9 [ 837.727220] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c0015503-e2c1-4e56-9631-c2c98cca1bc3 tempest-AttachVolumeNegativeTest-427402148 tempest-AttachVolumeNegativeTest-427402148-project-member] Lock "382b96fa-d807-4f96-b47e-784dfcd26437" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.273s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.751786] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "b21f2e22-a6d6-4ab5-baa7-110004aa776e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.752213] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "b21f2e22-a6d6-4ab5-baa7-110004aa776e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.752639] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 38e31c0b123f4720b959c8dccd483ae9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.766797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38e31c0b123f4720b959c8dccd483ae9 [ 837.767341] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 837.769150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d52dfd7f7cab4acba8f60edfc6ed9c95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.806646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d52dfd7f7cab4acba8f60edfc6ed9c95 [ 837.831680] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.832068] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.834259] env[61594]: INFO nova.compute.claims [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 837.836087] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg bbae16f132614f6aa974cafd904ff04f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.878612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbae16f132614f6aa974cafd904ff04f [ 837.880299] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg e0dba37a294048aab5b6e6acace323af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 837.890755] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0dba37a294048aab5b6e6acace323af [ 838.046350] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0715272f-6a62-45d7-97d7-5bbf775eee5a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.055282] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db20900-0534-4031-a175-37c5205c873a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.087381] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20d9d7b-51a6-43fc-a39e-fedf3b6dbeeb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.095142] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d026aa-de74-4d51-82f0-e910be4edfdb 
{{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.108615] env[61594]: DEBUG nova.compute.provider_tree [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.109196] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 564d993938974f029976841f0bb48976 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.117196] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 564d993938974f029976841f0bb48976 [ 838.118500] env[61594]: DEBUG nova.scheduler.client.report [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 838.120866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8064d0abf10642bfa0fa07f9a6737f9f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.132995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8064d0abf10642bfa0fa07f9a6737f9f [ 838.133868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.302s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.134251] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 838.135857] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c8232975820c409e905bea8f17e408dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.167508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8232975820c409e905bea8f17e408dc [ 838.168812] env[61594]: DEBUG nova.compute.utils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 838.169809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 39d56cede5324f7b9d93e20bf99e63ac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.174020] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 838.174020] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 838.179764] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 39d56cede5324f7b9d93e20bf99e63ac [ 838.180378] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 838.182149] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d03c963b12b147d39986ca44272bb302 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.194845] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.195450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e6e4f4df72e94e64b29adaf9cb244da5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.205417] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6e4f4df72e94e64b29adaf9cb244da5 [ 838.206235] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.206501] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 838.206784] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 838.208080] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b50e27a8-5d03-4051-a1a5-b448a8d6a875 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.213297] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d03c963b12b147d39986ca44272bb302 [ 838.215919] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 925ad0181981424e836f7c797cee4521 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.221875] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b861ed6-8343-4a1b-906e-8bd6c44a24c8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.242361] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.242920] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 4e48482fc3a04b8589235048b0edd596 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.251271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 925ad0181981424e836f7c797cee4521 [ 838.251905] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e48482fc3a04b8589235048b0edd596 [ 838.253769] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49d32ccb-b1fd-4640-bc6f-8c08f9df0423 could not be found. [ 838.254012] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.254278] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 838.254536] env[61594]: DEBUG oslo.service.loopingcall [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.255535] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 838.257702] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 838.258087] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 838.258277] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 838.258820] env[61594]: DEBUG nova.compute.manager [-] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 838.258921] env[61594]: DEBUG nova.network.neutron [-] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.261026] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b425b62-ae13-4d65-ba1c-8bb693503d4b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.270755] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112372e6-a8e2-474f-85d0-abd53743bd4b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.289736] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 838.290140] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 838.290232] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.290340] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 838.290522] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.290682] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 838.290924] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 838.291186] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 838.291372] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 838.291670] env[61594]: DEBUG nova.virt.hardware [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 838.291727] env[61594]: DEBUG nova.virt.hardware [None 
req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 838.292578] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4137b34-3094-4a92-b765-e76eeb2d2aaf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.300503] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bea7629d-c264-457f-b887-443f1ada1e9c could not be found. [ 838.300700] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 838.300883] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 838.301143] env[61594]: DEBUG oslo.service.loopingcall [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 838.301734] env[61594]: DEBUG nova.compute.manager [-] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 838.301833] env[61594]: DEBUG nova.network.neutron [-] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 838.306513] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a8c3b1-0376-4b4f-abda-3d6179586cca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.322044] env[61594]: DEBUG nova.network.neutron [-] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.322590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 66f84535e71849fc8f5c07f20c418b35 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.328963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 66f84535e71849fc8f5c07f20c418b35 [ 838.329354] env[61594]: DEBUG nova.network.neutron [-] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.329718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c8b2fd13525345138dc058d6bcbbc3e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.339981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8b2fd13525345138dc058d6bcbbc3e0 [ 838.340683] env[61594]: INFO nova.compute.manager [-] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Took 0.08 seconds to deallocate network for instance. [ 838.343276] env[61594]: DEBUG nova.compute.claims [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 838.343516] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.343750] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.345558] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 36812982e77c4fcb9cf5007b096d10fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.347193] env[61594]: DEBUG nova.network.neutron [-] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.347701] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 366bff940d8148fd954e37de9327832b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.355231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 366bff940d8148fd954e37de9327832b [ 838.355685] env[61594]: DEBUG nova.network.neutron [-] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.356135] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6a06744774264387a0dc7d4bdcf30d8a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.375862] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a06744774264387a0dc7d4bdcf30d8a [ 838.376412] env[61594]: INFO nova.compute.manager [-] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Took 0.07 seconds to deallocate network for instance. [ 838.378845] env[61594]: DEBUG nova.compute.claims [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 838.379041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.385636] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36812982e77c4fcb9cf5007b096d10fb [ 838.564574] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0d597e-ce23-4249-a1df-293c9774ab17 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.573937] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b597a683-af6c-44aa-8d11-c4fd862d1c61 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.612340] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc2a1d5-65f7-4aee-ba66-03d708e48475 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.621861] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a48a5a-ba71-478c-8def-7b58ef3de13d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.637950] env[61594]: DEBUG nova.compute.provider_tree [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.638525] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg bc563f93adf14925ba6517b8753e6eb4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.648498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc563f93adf14925ba6517b8753e6eb4 [ 838.649382] env[61594]: DEBUG nova.scheduler.client.report [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 838.651877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 40277c4b250b4813a700555bf38e9e5d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.661745] env[61594]: DEBUG nova.policy [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 838.669521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 40277c4b250b4813a700555bf38e9e5d [ 838.670566] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.671046] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. 
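Note on the inventory record above: placement derives usable capacity per resource class as (total - reserved) * allocation_ratio, so the figures reported for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be work out to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A minimal sketch of that arithmetic, illustrative only and not placement code:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, int(capacity))  # VCPU 192, MEMORY_MB 196078, DISK_GB 400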
[ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Traceback (most recent call last): [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.driver.spawn(context, instance, image_meta, [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] vm_ref = self.build_virtual_machine(instance, [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.671046] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] for vif in network_info: [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self._sync_wrapper(fn, *args, **kwargs) [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.wait() [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self[:] = self._gt.wait() [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self._exit_event.wait() [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] result = hub.switch() [ 838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
838.671508] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return self.greenlet.switch() [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] result = function(*args, **kwargs) [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] return func(*args, **kwargs) [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise e [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] nwinfo = self.network_api.allocate_for_instance( [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] created_port_ids = self._update_ports_for_instance( [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] with excutils.save_and_reraise_exception(): [ 838.671925] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] self.force_reraise() [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise self.value [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] updated_port = self._update_port( [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] _ensure_no_port_binding_failure(port) [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] raise exception.PortBindingFailed(port_id=port['id']) [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] nova.exception.PortBindingFailed: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. [ 838.672316] env[61594]: ERROR nova.compute.manager [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] [ 838.672664] env[61594]: DEBUG nova.compute.utils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 838.673404] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Build of instance 49d32ccb-b1fd-4640-bc6f-8c08f9df0423 was re-scheduled: Binding failed for port 24404798-6dd9-490d-817a-2a8df00a98ed, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 838.673785] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 838.674045] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.674175] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.674369] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.674833] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 7c23c3076c6d45e6994c62e8d0635c39 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.675625] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.297s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.678348] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 880ac998281640c2af815da30259ca7e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.683927] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c23c3076c6d45e6994c62e8d0635c39 [ 838.729217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 880ac998281640c2af815da30259ca7e [ 838.733952] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.774151] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Successfully created port: fece8e69-07e9-4015-ba5e-7aa4a9fb00ef {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 838.891119] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ff3687-e3c7-487f-b34e-233bd8b2a77a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.899372] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5da50c-182d-48b3-b6a8-ba3f176095f6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.932829] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acac09c-a75c-4fee-9eae-9f9f65384eff {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.941062] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1388228-0997-453e-938d-b76f9622e3b6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.954848] env[61594]: DEBUG nova.compute.provider_tree [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 838.955850] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 534e4e73726f4045b8b215c9714c74ba in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.964865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 534e4e73726f4045b8b215c9714c74ba [ 838.965850] env[61594]: DEBUG nova.scheduler.client.report [None 
req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 838.968292] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 4a8a137957504d85b9079bfd589d9d47 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.982937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a8a137957504d85b9079bfd589d9d47 [ 838.983640] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.308s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.984259] env[61594]: ERROR nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. 
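On the nova.policy DEBUG line a few entries above: network:attach_external_network defaults to an admin-only rule, and the credentials shown carry only the member and reader roles with is_admin False, so the check fails; it is evaluated non-fatally in this path, which is why the failure is only logged at DEBUG. Roughly, assuming the default policy with no overrides:

    creds = {'is_admin': False, 'roles': ['member', 'reader']}
    # The default rule, stated informally: only admin tokens may attach
    # an instance to an external network.
    allowed = creds['is_admin'] or 'admin' in creds['roles']
    print(allowed)  # False -> "Policy check for network:attach_external_network failed"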
[ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Traceback (most recent call last): [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.driver.spawn(context, instance, image_meta, [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] vm_ref = self.build_virtual_machine(instance, [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.984259] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] for vif in network_info: [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self._sync_wrapper(fn, *args, **kwargs) [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.wait() [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self[:] = self._gt.wait() [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self._exit_event.wait() [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] result = hub.switch() [ 838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
838.984662] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return self.greenlet.switch() [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] result = function(*args, **kwargs) [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] return func(*args, **kwargs) [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise e [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] nwinfo = self.network_api.allocate_for_instance( [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] created_port_ids = self._update_ports_for_instance( [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] with excutils.save_and_reraise_exception(): [ 838.985132] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] self.force_reraise() [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise self.value [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] updated_port = self._update_port( [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] _ensure_no_port_binding_failure(port) [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] raise exception.PortBindingFailed(port_id=port['id']) [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] nova.exception.PortBindingFailed: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. [ 838.985556] env[61594]: ERROR nova.compute.manager [instance: bea7629d-c264-457f-b887-443f1ada1e9c] [ 838.986033] env[61594]: DEBUG nova.compute.utils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 838.986701] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Build of instance bea7629d-c264-457f-b887-443f1ada1e9c was re-scheduled: Binding failed for port 18c8641b-7198-4e85-a2b7-ddf8a76c3639, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 838.987131] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 838.987354] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquiring lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.987501] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Acquired lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.989345] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.989345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 4052bdce2c4047a2ae1eb9d93c024f1b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 838.997091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4052bdce2c4047a2ae1eb9d93c024f1b [ 839.067505] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 
tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.264592] env[61594]: ERROR nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. [ 839.264592] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 839.264592] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 839.264592] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 839.264592] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.264592] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.264592] env[61594]: ERROR nova.compute.manager raise self.value [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 839.264592] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 839.264592] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.264592] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 839.265183] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 839.265183] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 839.265183] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. 
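The PortBindingFailed exceptions in these tracebacks all come out of _ensure_no_port_binding_failure: after updating a port, Nova inspects the binding:vif_type Neutron returned, and 'binding_failed' means no Neutron mechanism driver could bind the port on the target host (hence the pointer to the neutron logs). A simplified sketch of the check, not the verbatim Nova code:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Neutron reports binding:vif_type = 'binding_failed' when binding
        # failed; any other value (ovs, dvs, ...) means the port is bound.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])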
[ 839.265183] env[61594]: ERROR nova.compute.manager [ 839.265183] env[61594]: Traceback (most recent call last): [ 839.265183] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 839.265183] env[61594]: listener.cb(fileno) [ 839.265183] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 839.265183] env[61594]: result = function(*args, **kwargs) [ 839.265183] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 839.265183] env[61594]: return func(*args, **kwargs) [ 839.265183] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 839.265183] env[61594]: raise e [ 839.265183] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 839.265183] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 839.265183] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 839.265183] env[61594]: created_port_ids = self._update_ports_for_instance( [ 839.265183] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 839.265183] env[61594]: with excutils.save_and_reraise_exception(): [ 839.265183] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.265183] env[61594]: self.force_reraise() [ 839.265183] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.265183] env[61594]: raise self.value [ 839.265183] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 839.265183] env[61594]: updated_port = self._update_port( [ 839.265183] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.265183] env[61594]: _ensure_no_port_binding_failure(port) [ 839.265183] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 839.265183] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 839.266185] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. [ 839.266185] env[61594]: Removing descriptor: 21 [ 839.266185] env[61594]: ERROR nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. 
[ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Traceback (most recent call last): [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] yield resources [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.driver.spawn(context, instance, image_meta, [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 839.266185] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] vm_ref = self.build_virtual_machine(instance, [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] vif_infos = vmwarevif.get_vif_info(self._session, [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] for vif in network_info: [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self._sync_wrapper(fn, *args, **kwargs) [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.wait() [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self[:] = self._gt.wait() [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self._exit_event.wait() [ 839.266667] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 839.267286] env[61594]: ERROR 
nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] result = hub.switch() [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self.greenlet.switch() [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] result = function(*args, **kwargs) [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return func(*args, **kwargs) [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise e [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] nwinfo = self.network_api.allocate_for_instance( [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 839.267286] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] created_port_ids = self._update_ports_for_instance( [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] with excutils.save_and_reraise_exception(): [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.force_reraise() [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise self.value [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] updated_port = self._update_port( [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 839.267799] 
env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] _ensure_no_port_binding_failure(port) [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 839.267799] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise exception.PortBindingFailed(port_id=port['id']) [ 839.268953] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. [ 839.268953] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] [ 839.268953] env[61594]: INFO nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Terminating instance [ 839.268953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.269125] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.269216] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.270774] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4f66ea04b23749dfbcdba26cb8e0ff2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.281585] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f66ea04b23749dfbcdba26cb8e0ff2a [ 839.358986] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.397037] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.397896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 8f79f903ef9547e980033269e056a813 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.412789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f79f903ef9547e980033269e056a813 [ 839.413576] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-49d32ccb-b1fd-4640-bc6f-8c08f9df0423" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.413832] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 839.414231] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 839.414231] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.482152] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.482152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 2e003a2581a248ac8fca4c061ad1a1e9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.483546] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.488370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e77bfd73452b4cc09d6f67ce321afb9e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.494105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e003a2581a248ac8fca4c061ad1a1e9 [ 839.494789] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Releasing lock "refresh_cache-bea7629d-c264-457f-b887-443f1ada1e9c" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.494967] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 839.495175] env[61594]: DEBUG nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 839.495349] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.499788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e77bfd73452b4cc09d6f67ce321afb9e [ 839.499788] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.499788] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 5b99a1a5c6a744c08c209c974b8f96aa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.509075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b99a1a5c6a744c08c209c974b8f96aa [ 839.509778] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: 49d32ccb-b1fd-4640-bc6f-8c08f9df0423] Took 0.10 seconds to deallocate network for instance. 
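The "Virt driver does not provide unplug_vifs method" messages above come from the cleanup path after the failed builds: unplug_vifs is an optional driver operation, the vmwareapi driver does not implement it, so the manager skips VIF unplugging and goes straight to deallocating the instance's Neutron resources. The probing pattern is essentially a try/except around the optional call; a rough sketch with illustrative names, not the actual manager code:

    class VMwareLikeDriver:
        def unplug_vifs(self, instance, network_info):
            raise NotImplementedError()

    def cleanup_allocated_networks(driver, instance, network_info):
        try:
            driver.unplug_vifs(instance, network_info)
        except NotImplementedError:
            # Matches the DEBUG line above; fall through to deallocation.
            print("Virt driver does not provide unplug_vifs method, "
                  "so it is not possible determine if VIFs should be unplugged.")
        # ...then release the instance's ports/networks via Neutron.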
[ 839.511535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 36fbbb7ff9c542d7afd11b92b7f575a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.549411] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.550202] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 9bdb061532ef4ae8a5ab95d7b35d7d55 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.557492] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bdb061532ef4ae8a5ab95d7b35d7d55 [ 839.558332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36fbbb7ff9c542d7afd11b92b7f575a4 [ 839.558904] env[61594]: DEBUG nova.network.neutron [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.559448] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg c22b1918f45746c497d1acc18f86b97d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.562657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg e7602f4c2e6841fabb48a1c79c3ebe64 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.566846] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c22b1918f45746c497d1acc18f86b97d [ 839.568024] env[61594]: INFO nova.compute.manager [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] [instance: bea7629d-c264-457f-b887-443f1ada1e9c] Took 0.07 seconds to deallocate network for instance. 
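The oslo.messaging amqpdriver lines throughout this log pair up: every "Expecting reply to msg <id> in queue reply_..." should later be matched by a "Received RPC response for msg <id>". When auditing a log like this it can help to confirm that every expected reply actually arrived; a small self-contained way to do that over the raw text (illustrative):

    import re

    def unanswered_rpc_msgs(log_text):
        expected = set(re.findall(r'Expecting reply to msg (\w+)', log_text))
        received = set(re.findall(r'Received RPC response for msg (\w+)', log_text))
        return expected - received

    # e.g. unanswered_rpc_msgs(open('nova-compute.log').read()) returns the
    # message ids still waiting for a response at the point the log ends.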
[ 839.568926] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 31ae12b55acd4874bd2920132fdc8ae9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.596876] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7602f4c2e6841fabb48a1c79c3ebe64 [ 839.601123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 31ae12b55acd4874bd2920132fdc8ae9 [ 839.603762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 7a426c47d7e74aac8011f706b0693556 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.631641] env[61594]: INFO nova.scheduler.client.report [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Deleted allocations for instance 49d32ccb-b1fd-4640-bc6f-8c08f9df0423 [ 839.641950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to msg 32dc273518e14cdca48853db9eda645b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.645268] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7a426c47d7e74aac8011f706b0693556 [ 839.661644] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32dc273518e14cdca48853db9eda645b [ 839.662250] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "49d32ccb-b1fd-4640-bc6f-8c08f9df0423" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.115s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.672647] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.673227] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg cb762a34908d422a98cb7e802cb10f84 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.680641] env[61594]: INFO nova.scheduler.client.report [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Deleted allocations for instance bea7629d-c264-457f-b887-443f1ada1e9c [ 839.686165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb762a34908d422a98cb7e802cb10f84 [ 839.686827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Expecting reply to 
msg b437e7ae45a54cbc94dee7527fb41a42 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.687647] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.688030] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 839.688229] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 839.688830] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-17a2c1f2-cc27-4c20-be99-a8d1ab275400 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.698796] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a78dc2-b92a-4803-b8c0-dbde7d030b69 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.711940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b437e7ae45a54cbc94dee7527fb41a42 [ 839.712781] env[61594]: DEBUG oslo_concurrency.lockutils [None req-82fa6a67-38bd-452b-b5c0-5a0b3db0fda8 tempest-MultipleCreateTestJSON-962420962 tempest-MultipleCreateTestJSON-962420962-project-member] Lock "bea7629d-c264-457f-b887-443f1ada1e9c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.231s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 839.726642] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cb22b0a4-bcef-4964-bd63-5abb2789cedd could not be found. [ 839.726870] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 839.727063] env[61594]: INFO nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Took 0.04 seconds to destroy the instance on the hypervisor. 
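[editor's aside] The "Acquiring lock ... / Lock ... acquired ... waited N.NNNs / Lock ... released ... held N.NNNs" triplets above (the per-instance build locks held for ~13s, the "compute_resources" and "refresh_cache-<uuid>" locks) are all emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of what such a guarded section looks like from the caller's side, assuming oslo.concurrency is installed; the function body is illustrative, not the Nova resource tracker itself.

```python
# Minimal sketch of the oslo.concurrency usage behind the
# "acquired ... waited" / "released ... held" DEBUG lines in this log.
import time

from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def abort_instance_claim():
    # While this body runs, other callers serialized on the same in-process
    # "compute_resources" semaphore block; their delay shows up as the
    # "waited N.NNNs" figure and this body's duration as "held N.NNNs"
    # (when DEBUG logging is enabled, as in the log above).
    time.sleep(0.05)


# The same helper is also available as a context manager, which is how the
# "refresh_cache-<instance-uuid>" locks above are typically taken:
with lockutils.lock("refresh_cache-<instance-uuid>"):
    pass  # refresh the instance network info cache under the lock

abort_instance_claim()
```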
[ 839.727330] env[61594]: DEBUG oslo.service.loopingcall [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 839.727668] env[61594]: DEBUG nova.compute.manager [-] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 839.727738] env[61594]: DEBUG nova.network.neutron [-] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.782752] env[61594]: DEBUG nova.network.neutron [-] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.783509] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9265e83706964b318026a5ebec5dc387 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.794996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9265e83706964b318026a5ebec5dc387 [ 839.794996] env[61594]: DEBUG nova.network.neutron [-] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.794996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 62d9ea0c697c4be18819740756c584db in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.810066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 62d9ea0c697c4be18819740756c584db [ 839.810066] env[61594]: INFO nova.compute.manager [-] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Took 0.08 seconds to deallocate network for instance. 
[ 839.815021] env[61594]: DEBUG nova.compute.claims [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 839.815021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.815021] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.815021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f274226551dc4345aa4e976acc687fea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 839.863021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f274226551dc4345aa4e976acc687fea [ 840.020595] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238e5acc-e90b-40f8-b534-78ccc673b615 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.036261] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1b490ee-925e-49cf-b3c0-55cbc9a231ee {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.073656] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079cbf7d-66b8-4880-9c6f-7acfed8db472 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.082962] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2277c054-3363-4be6-99dc-e5ea5da25552 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.096875] env[61594]: DEBUG nova.compute.provider_tree [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.096938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg e86fd6fe9d9e4199a530dbaf92b6d900 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.109962] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e86fd6fe9d9e4199a530dbaf92b6d900 [ 840.111120] env[61594]: DEBUG nova.scheduler.client.report [None 
req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 840.113738] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg d7fbd0be5c064bcab0c2f53f6e90e51b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.134353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d7fbd0be5c064bcab0c2f53f6e90e51b [ 840.135171] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.135726] env[61594]: ERROR nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. 
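[editor's aside] The scheduler report entry just above logs the full inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be (total, reserved, allocation_ratio and max_unit per resource class). Before the PortBindingFailed traceback that follows, a quick worked check of what that inventory means in schedulable terms, assuming the standard Placement capacity formula capacity = (total - reserved) * allocation_ratio:

```python
# Worked check of the usable capacity implied by the inventory logged above
# for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 139},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, "
          f"largest single allocation {inv['max_unit']}")

# VCPU: 192 (48 * 4.0), MEMORY_MB: 196078 (196590 - 512), DISK_GB: 400;
# max_unit still caps any one instance (e.g. at most 16 vCPUs, 139 GB disk).
```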
[ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Traceback (most recent call last): [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.driver.spawn(context, instance, image_meta, [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] vm_ref = self.build_virtual_machine(instance, [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] vif_infos = vmwarevif.get_vif_info(self._session, [ 840.135726] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] for vif in network_info: [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self._sync_wrapper(fn, *args, **kwargs) [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.wait() [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self[:] = self._gt.wait() [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self._exit_event.wait() [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] result = hub.switch() [ 840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
840.136081] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return self.greenlet.switch() [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] result = function(*args, **kwargs) [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] return func(*args, **kwargs) [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise e [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] nwinfo = self.network_api.allocate_for_instance( [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] created_port_ids = self._update_ports_for_instance( [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] with excutils.save_and_reraise_exception(): [ 840.136737] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] self.force_reraise() [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise self.value [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] updated_port = self._update_port( [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] _ensure_no_port_binding_failure(port) [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] raise exception.PortBindingFailed(port_id=port['id']) [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] nova.exception.PortBindingFailed: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. [ 840.137466] env[61594]: ERROR nova.compute.manager [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] [ 840.137859] env[61594]: DEBUG nova.compute.utils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 840.138926] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Build of instance cb22b0a4-bcef-4964-bd63-5abb2789cedd was re-scheduled: Binding failed for port 7ed6abdf-02e3-4eb2-89c4-fe379e8042ad, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 840.139390] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 840.139615] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.139762] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.139928] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 840.140406] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg bb9e0024d8774f4fbf09994f3ca5344d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.150296] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb9e0024d8774f4fbf09994f3ca5344d [ 840.204367] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 
cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.252167] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "5991598e-20cd-475d-bbdc-83cd4909a31e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.252167] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "5991598e-20cd-475d-bbdc-83cd4909a31e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.252167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg f170a402232247f79f917d28ba980c23 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.264349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f170a402232247f79f917d28ba980c23 [ 840.264897] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 840.266668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 855006d1e59b48dc802222c54d58a96d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.305318] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 855006d1e59b48dc802222c54d58a96d [ 840.329384] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.336407] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.007s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.338332] env[61594]: INFO nova.compute.claims [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.339934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 5eff88f115044bb0ace1b3f2fbb57b7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.378896] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5eff88f115044bb0ace1b3f2fbb57b7f [ 840.380819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 56dd0e7b863b4fb28dc995e42a3e6954 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.391705] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 56dd0e7b863b4fb28dc995e42a3e6954 [ 840.546940] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188b5bdf-cbef-4f2c-8ca6-beddb3e102b9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.554952] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a855a460-f136-42d6-8c63-32b18f591b72 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.591703] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d71c38-1fb0-48fe-8b9d-5e8c0eda45f9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.604302] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb51b19-f04a-4251-ba4b-2134e55634de {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.620697] env[61594]: DEBUG nova.compute.provider_tree [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.621302] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg e874e2ef49fe4dc5ae3edcc7a63a09a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.634717] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e874e2ef49fe4dc5ae3edcc7a63a09a4 [ 840.638894] env[61594]: DEBUG nova.scheduler.client.report [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 840.638894] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 76f77c2f63b54498b67d16b6fc7bc8db in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.653677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76f77c2f63b54498b67d16b6fc7bc8db [ 840.654775] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.318s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.655157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 7d7ed719cf0e4744a4096e007c0ab74e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.671913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d7ed719cf0e4744a4096e007c0ab74e [ 840.672882] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "d664e3ae-4421-4c21-ab2a-82290419aeec" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 840.673197] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 
tempest-ServerGroupTestJSON-928422351-project-member] Lock "d664e3ae-4421-4c21-ab2a-82290419aeec" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.673761] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg e0d69568990f4ba28aebcdafc2f17162 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.679931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e0d69568990f4ba28aebcdafc2f17162 [ 840.680909] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "d664e3ae-4421-4c21-ab2a-82290419aeec" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.008s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.681525] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 840.683362] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 8af8646881a943528e38adc1f4d043a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.732308] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8af8646881a943528e38adc1f4d043a1 [ 840.734742] env[61594]: DEBUG nova.compute.utils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 840.737745] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 0f6c53f073624e0882a34fb0bfe7f388 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.738523] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 840.738609] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 840.745606] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.746129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 3c3ef065ae0b4d1aa89195c67c145874 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.751803] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0f6c53f073624e0882a34fb0bfe7f388 [ 840.752434] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 840.754126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 4e7f6e9be1db4c9ca8d59a6af5131f71 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.755917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c3ef065ae0b4d1aa89195c67c145874 [ 840.756342] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-cb22b0a4-bcef-4964-bd63-5abb2789cedd" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.756563] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 840.756762] env[61594]: DEBUG nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 840.756933] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 840.791375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e7f6e9be1db4c9ca8d59a6af5131f71 [ 840.795725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 2d77cd7f0dce4a458d9df7fda74e50d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.808415] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.809098] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 073696f60c9f4e20a63369b25bc918cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.820900] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 073696f60c9f4e20a63369b25bc918cd [ 840.822038] env[61594]: DEBUG nova.network.neutron [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.822789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg cd34436cc54f44df87c49c92947c3059 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.846252] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d77cd7f0dce4a458d9df7fda74e50d8 [ 840.846813] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 840.853628] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd34436cc54f44df87c49c92947c3059 [ 840.855403] env[61594]: INFO nova.compute.manager [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: cb22b0a4-bcef-4964-bd63-5abb2789cedd] Took 0.10 seconds to deallocate network for instance. [ 840.856231] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a971128bae994198b1a69e0b78e00f5b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.895338] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 840.895690] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 840.895840] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.896058] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 840.896255] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.896436] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 840.896690] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 
tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 840.896960] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 840.897369] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 840.897427] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 840.897652] env[61594]: DEBUG nova.virt.hardware [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 840.899294] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6ade8b-1d64-40b3-aca9-1d0d3d4949ae {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.904766] env[61594]: DEBUG nova.policy [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f29927aa10404757aeb7596fc8ea9597', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '628c7ec13bef4d04b411f7ef6e39ef0f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 840.914491] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e59811-e940-491a-a36e-f64103ac23fd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.952996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a971128bae994198b1a69e0b78e00f5b [ 840.957013] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 6d5b9ea6b3734755af86e08e2e94d165 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 840.990792] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d5b9ea6b3734755af86e08e2e94d165 [ 841.023724] env[61594]: INFO nova.scheduler.client.report [None 
req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance cb22b0a4-bcef-4964-bd63-5abb2789cedd [ 841.031109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c0bc3d51adc64eb687403b489aabcb2a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 841.047040] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c0bc3d51adc64eb687403b489aabcb2a [ 841.047681] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7a7d437f-2932-41bc-9bad-6754e8578037 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "cb22b0a4-bcef-4964-bd63-5abb2789cedd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.881s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 841.166196] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Successfully created port: bd06f62e-7763-4ae9-a899-977980921581 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.283889] env[61594]: DEBUG nova.compute.manager [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Received event network-changed-6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 841.284063] env[61594]: DEBUG nova.compute.manager [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Refreshing instance network info cache due to event network-changed-6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 841.284547] env[61594]: DEBUG oslo_concurrency.lockutils [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] Acquiring lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 841.284738] env[61594]: DEBUG oslo_concurrency.lockutils [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] Acquired lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.284912] env[61594]: DEBUG nova.network.neutron [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Refreshing network info cache for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.285681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] Expecting reply to msg 0e91e2f864a64b10a5523769b06b7940 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 841.295991] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e91e2f864a64b10a5523769b06b7940 [ 841.465144] env[61594]: DEBUG nova.network.neutron [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 842.154391] env[61594]: ERROR nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. 
[ 842.154391] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 842.154391] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.154391] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.154391] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.154391] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.154391] env[61594]: ERROR nova.compute.manager raise self.value [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.154391] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 842.154391] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.154391] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 842.155476] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.155476] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 842.155476] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. 
[ 842.155476] env[61594]: ERROR nova.compute.manager [ 842.155476] env[61594]: Traceback (most recent call last): [ 842.155476] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 842.155476] env[61594]: listener.cb(fileno) [ 842.155476] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 842.155476] env[61594]: result = function(*args, **kwargs) [ 842.155476] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 842.155476] env[61594]: return func(*args, **kwargs) [ 842.155476] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 842.155476] env[61594]: raise e [ 842.155476] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 842.155476] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 842.155476] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.155476] env[61594]: created_port_ids = self._update_ports_for_instance( [ 842.155476] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.155476] env[61594]: with excutils.save_and_reraise_exception(): [ 842.155476] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.155476] env[61594]: self.force_reraise() [ 842.155476] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.155476] env[61594]: raise self.value [ 842.155476] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.155476] env[61594]: updated_port = self._update_port( [ 842.155476] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.155476] env[61594]: _ensure_no_port_binding_failure(port) [ 842.155476] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.155476] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 842.156880] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. [ 842.156880] env[61594]: Removing descriptor: 22 [ 842.156880] env[61594]: ERROR nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. 
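[editor's aside] Both failed builds in this section end the same way: Neutron returns the port with a failed binding, and the `_ensure_no_port_binding_failure` frame visible in the tracebacks above and below converts that into `PortBindingFailed`, which then aborts the spawn. The sketch below reproduces that check standalone against a plain dict rather than the Nova/Neutron objects; treating `binding:vif_type == 'binding_failed'` as the failure sentinel, and the `binding:host_id` value shown, are illustrative assumptions here, not taken from this log.

```python
# Standalone sketch (not the Nova source) of the check behind the
# PortBindingFailed errors in this log: after updating a port, the port's
# binding:vif_type is inspected and a failed binding is raised as an error.
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            f"please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# The port payload behind the failure above would have looked roughly like:
port = {
    'id': '6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4',
    'binding:host_id': 'cpu-1',            # illustrative host name
    'binding:vif_type': 'binding_failed',  # assumed sentinel for a failed bind
}

try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port 6fd9c64d-..., please check neutron logs ...
```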
[ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Traceback (most recent call last): [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] yield resources [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.driver.spawn(context, instance, image_meta, [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 842.156880] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] vm_ref = self.build_virtual_machine(instance, [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] vif_infos = vmwarevif.get_vif_info(self._session, [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] for vif in network_info: [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self._sync_wrapper(fn, *args, **kwargs) [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.wait() [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self[:] = self._gt.wait() [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self._exit_event.wait() [ 842.157660] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 842.158308] env[61594]: ERROR 
nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] result = hub.switch() [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self.greenlet.switch() [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] result = function(*args, **kwargs) [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return func(*args, **kwargs) [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise e [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] nwinfo = self.network_api.allocate_for_instance( [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 842.158308] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] created_port_ids = self._update_ports_for_instance( [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] with excutils.save_and_reraise_exception(): [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.force_reraise() [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise self.value [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] updated_port = self._update_port( [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 842.158914] 
env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] _ensure_no_port_binding_failure(port) [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 842.158914] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise exception.PortBindingFailed(port_id=port['id']) [ 842.159642] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. [ 842.159642] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] [ 842.159642] env[61594]: INFO nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Terminating instance [ 842.159642] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquiring lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.228884] env[61594]: DEBUG nova.network.neutron [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.229442] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] Expecting reply to msg 52b5fc247325454ea68cc6479e319ca8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 842.239932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 52b5fc247325454ea68cc6479e319ca8 [ 842.240583] env[61594]: DEBUG oslo_concurrency.lockutils [req-94c1ada1-fe39-476c-b259-0b6633df1edb req-78097b2a-40c1-4f15-8fac-241c7919fc03 service nova] Releasing lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.240971] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquired lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.241164] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 842.241714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 
tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg b6dc0e42f2a744cf8d54a602fc100f6a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 842.249925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6dc0e42f2a744cf8d54a602fc100f6a [ 842.334358] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 842.968691] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Successfully created port: bb82d309-55b2-42a9-9769-0594e64c862b {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.004412] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.004955] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg f140c9299d334efaa0f43a5943c1a920 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.014815] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f140c9299d334efaa0f43a5943c1a920 [ 843.015536] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Releasing lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.015940] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 843.016160] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 843.016681] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6ea2545-dd30-4d65-9800-3aea5311ca81 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.032666] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2f82c6-cc2d-471b-b4bc-ce4135fcc7f0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.063877] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ad2881a4-9715-40ed-8489-85a6a575fb30 could not be found. [ 843.064104] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 843.064284] env[61594]: INFO nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Took 0.05 seconds to destroy the instance on the hypervisor. [ 843.065135] env[61594]: DEBUG oslo.service.loopingcall [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 843.065255] env[61594]: DEBUG nova.compute.manager [-] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 843.065255] env[61594]: DEBUG nova.network.neutron [-] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 843.148046] env[61594]: DEBUG nova.network.neutron [-] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 843.149237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e39d6b52f382437aa6182b3d7d4e8b95 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.158383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e39d6b52f382437aa6182b3d7d4e8b95 [ 843.158383] env[61594]: DEBUG nova.network.neutron [-] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 843.158752] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f0e0d1a99e1648bfb7fde35133867386 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.168786] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0e0d1a99e1648bfb7fde35133867386 [ 843.170429] env[61594]: INFO nova.compute.manager [-] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Took 0.10 seconds to deallocate network for instance. [ 843.171806] env[61594]: DEBUG nova.compute.claims [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 843.171806] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.172020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.174661] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg eb70e43a0a6f4fb6b7e0b4a88940fa2d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.220791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eb70e43a0a6f4fb6b7e0b4a88940fa2d [ 843.364565] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-815599b9-b0af-4e6e-b7d8-0791a77aae0c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.377741] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fb28d5-b2b4-43c0-99ba-9ea72686f01c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.418258] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddff79ac-5769-4b5e-9746-82e89751dbd0 
{{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.428039] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e30825-fac0-4242-bce6-eb55bf786e66 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.446807] env[61594]: DEBUG nova.compute.provider_tree [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.447383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 4c92b55a935546858cbc37c30a13179c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.464680] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c92b55a935546858cbc37c30a13179c [ 843.466787] env[61594]: DEBUG nova.scheduler.client.report [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 843.469894] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg d76f615c97064f06808e529f763e8e16 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.495224] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d76f615c97064f06808e529f763e8e16 [ 843.496280] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.497108] env[61594]: ERROR nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. 
[ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Traceback (most recent call last): [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.driver.spawn(context, instance, image_meta, [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self._vmops.spawn(context, instance, image_meta, injected_files, [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] vm_ref = self.build_virtual_machine(instance, [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] vif_infos = vmwarevif.get_vif_info(self._session, [ 843.497108] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] for vif in network_info: [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self._sync_wrapper(fn, *args, **kwargs) [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.wait() [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self[:] = self._gt.wait() [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self._exit_event.wait() [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] result = hub.switch() [ 843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
843.497494] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return self.greenlet.switch() [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] result = function(*args, **kwargs) [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] return func(*args, **kwargs) [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise e [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] nwinfo = self.network_api.allocate_for_instance( [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] created_port_ids = self._update_ports_for_instance( [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] with excutils.save_and_reraise_exception(): [ 843.497875] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] self.force_reraise() [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise self.value [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] updated_port = self._update_port( [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] _ensure_no_port_binding_failure(port) [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] raise exception.PortBindingFailed(port_id=port['id']) [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] nova.exception.PortBindingFailed: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. [ 843.498272] env[61594]: ERROR nova.compute.manager [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] [ 843.498595] env[61594]: DEBUG nova.compute.utils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 843.500545] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Build of instance ad2881a4-9715-40ed-8489-85a6a575fb30 was re-scheduled: Binding failed for port 6fd9c64d-1ba4-44d6-9b86-1a09fba34bc4, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 843.500998] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 843.512444] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquiring lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 843.512444] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Acquired lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.512444] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 843.512444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 8251d08e16ce4adc8859d56347a58e4b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 843.516992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] 
Received RPC response for msg 8251d08e16ce4adc8859d56347a58e4b [ 843.607869] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 844.439678] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.440242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg e26f6ed4e3404c209c1b199beb6c6704 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.452798] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e26f6ed4e3404c209c1b199beb6c6704 [ 844.453476] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Releasing lock "refresh_cache-ad2881a4-9715-40ed-8489-85a6a575fb30" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 844.453693] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 844.453880] env[61594]: DEBUG nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 844.454064] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 844.604509] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 844.605146] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg d923429315a54a908f7bdf4666a0205d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.614597] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d923429315a54a908f7bdf4666a0205d [ 844.618198] env[61594]: DEBUG nova.network.neutron [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.618198] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg c64c890e12214579a309033ae9045f69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.629760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c64c890e12214579a309033ae9045f69 [ 844.630449] env[61594]: INFO nova.compute.manager [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] [instance: ad2881a4-9715-40ed-8489-85a6a575fb30] Took 0.18 seconds to deallocate network for instance. 
[ 844.632755] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 3997cc28efec4067afbe70a62a1e0bf6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.731571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3997cc28efec4067afbe70a62a1e0bf6 [ 844.734421] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg 1274f13b02284c30978ecdc51c028dd1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.774833] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1274f13b02284c30978ecdc51c028dd1 [ 844.801879] env[61594]: INFO nova.scheduler.client.report [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Deleted allocations for instance ad2881a4-9715-40ed-8489-85a6a575fb30 [ 844.817966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Expecting reply to msg bcd6b24625304737896a38d143f915ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 844.837656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bcd6b24625304737896a38d143f915ea [ 844.838350] env[61594]: DEBUG oslo_concurrency.lockutils [None req-7ee73e40-d081-4f4d-ad4f-c1e96e36bd29 tempest-ServersNegativeTestMultiTenantJSON-1155338673 tempest-ServersNegativeTestMultiTenantJSON-1155338673-project-member] Lock "ad2881a4-9715-40ed-8489-85a6a575fb30" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.331s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.009228] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "88a6495c-b9b7-4822-9cae-f27594545b3e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.009468] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "88a6495c-b9b7-4822-9cae-f27594545b3e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.009938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ad1dcee718b24e5e8d1738066d3a9e69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.021606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
ad1dcee718b24e5e8d1738066d3a9e69 [ 845.021606] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 845.022677] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ba76dbd7130d47c4bf61fbbc2c9261f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.078984] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ba76dbd7130d47c4bf61fbbc2c9261f3 [ 845.108456] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.108762] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.110512] env[61594]: INFO nova.compute.claims [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 845.112418] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 1e26b93e1b9e4f58a573a123aca1c94a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.164567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e26b93e1b9e4f58a573a123aca1c94a [ 845.166452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 61569da20ccc4de8b81712af92c4526b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.176300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61569da20ccc4de8b81712af92c4526b [ 845.299673] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e033361-c483-4de1-8ced-43a9ab8183d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.308520] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba50c218-38b8-4a53-9b82-aed971efd34a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.340672] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c363dab-c455-4bf9-92ca-df1106d45163 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.348801] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19db370d-64f2-451a-80e8-6707225802d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.362679] env[61594]: DEBUG nova.compute.provider_tree [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 845.363238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg f02cdf043cbc428b86de137504216dee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.370938] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f02cdf043cbc428b86de137504216dee [ 845.371971] env[61594]: DEBUG nova.scheduler.client.report [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 845.374353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 5c7c9827212d461f83e822c6d84d611e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.393370] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c7c9827212d461f83e822c6d84d611e [ 845.397952] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.285s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.397952] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 845.397952] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 4b0f4b8163d34056812112e9f01cd42a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.437951] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b0f4b8163d34056812112e9f01cd42a [ 845.440153] env[61594]: DEBUG nova.compute.utils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.443019] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 7aba2229187c4372ba056a9870cd68e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.443019] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 845.443276] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.456072] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7aba2229187c4372ba056a9870cd68e1 [ 845.456703] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 845.459041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 1f3bff7ac3f64e54a21120cde5e430fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.498598] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1f3bff7ac3f64e54a21120cde5e430fe [ 845.502673] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 5cbbc3cc01bd4a16bfb3b76a87fbb0fc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.560185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5cbbc3cc01bd4a16bfb3b76a87fbb0fc [ 845.562038] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 845.604387] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.604777] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.605034] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.605354] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.605588] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.605831] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.606179] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.606695] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.606695] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 
tempest-ServersTestJSON-1597439793-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.606908] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.607298] env[61594]: DEBUG nova.virt.hardware [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.608118] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-246ef0e8-caf3-4034-8892-6ad2f528fdd9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.617230] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6d63db-e88c-4793-8c10-c6eb9584dccd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.697499] env[61594]: DEBUG nova.policy [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '789177a2f7be455cadec45cf03d67521', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3dfb77f12805418eaa6127fc75becec0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 845.709303] env[61594]: DEBUG nova.compute.manager [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Received event network-changed-fece8e69-07e9-4015-ba5e-7aa4a9fb00ef {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 845.709426] env[61594]: DEBUG nova.compute.manager [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Refreshing instance network info cache due to event network-changed-fece8e69-07e9-4015-ba5e-7aa4a9fb00ef. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 845.709652] env[61594]: DEBUG oslo_concurrency.lockutils [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] Acquiring lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.709795] env[61594]: DEBUG oslo_concurrency.lockutils [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] Acquired lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.710018] env[61594]: DEBUG nova.network.neutron [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Refreshing network info cache for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 845.710593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] Expecting reply to msg 94fca75d49494f2abac146ca327e4446 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 845.721741] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 94fca75d49494f2abac146ca327e4446 [ 845.859676] env[61594]: DEBUG nova.network.neutron [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.270934] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "0f6368a9-cadc-46b4-be16-017724580876" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.271189] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "0f6368a9-cadc-46b4-be16-017724580876" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.271672] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg bb8bd7cc549f4ea2a28053163d1ff8b8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.288508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bb8bd7cc549f4ea2a28053163d1ff8b8 [ 846.289758] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 846.291578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg dadb2259d731485098291277095ef6c0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.326635] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dadb2259d731485098291277095ef6c0 [ 846.349141] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.349141] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.350478] env[61594]: INFO nova.compute.claims [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.352323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg c4041502f87a4e3fade10c0dfc8f4c07 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.394143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4041502f87a4e3fade10c0dfc8f4c07 [ 846.396089] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 11af5a02d72749e792d0169f48756b57 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.406921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11af5a02d72749e792d0169f48756b57 [ 846.562962] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a2acc3-8e68-44fc-a5ae-9fdf78d1cbd8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.572586] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d809ad74-4752-48d4-b58d-6551c6e68ee5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.607354] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f886caec-0fd6-49b6-b35d-a949a17f1d6c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.617722] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8de155fd-5542-4d69-a926-fecd8165221f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.633160] env[61594]: DEBUG nova.compute.provider_tree [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.633731] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 8514972bf5b748f7a901e679b4afae4d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.647934] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8514972bf5b748f7a901e679b4afae4d [ 846.649031] env[61594]: DEBUG nova.scheduler.client.report [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 846.651545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 551c3785a39e49b3a15bf95d5776fee2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.668099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 551c3785a39e49b3a15bf95d5776fee2 [ 846.668977] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.320s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.669479] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 846.671188] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 3f9bc2de2bdf45db9a34a705d9e8e87f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.673218] env[61594]: DEBUG nova.network.neutron [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.673641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] Expecting reply to msg 4f7d04bf851b4bbd87e257d9d3025406 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.684921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f7d04bf851b4bbd87e257d9d3025406 [ 846.685702] env[61594]: DEBUG oslo_concurrency.lockutils [req-abe61745-2a05-41f1-bdd9-194ec58f086c req-c34bc35d-7183-4057-a427-8fa017c09dc1 service nova] Releasing lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.699185] env[61594]: ERROR nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. 
[ 846.699185] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 846.699185] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.699185] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.699185] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.699185] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.699185] env[61594]: ERROR nova.compute.manager raise self.value [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.699185] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 846.699185] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.699185] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 846.701896] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.701896] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 846.701896] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. 
[ 846.701896] env[61594]: ERROR nova.compute.manager [ 846.701896] env[61594]: Traceback (most recent call last): [ 846.701896] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 846.701896] env[61594]: listener.cb(fileno) [ 846.701896] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 846.701896] env[61594]: result = function(*args, **kwargs) [ 846.701896] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.701896] env[61594]: return func(*args, **kwargs) [ 846.701896] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 846.701896] env[61594]: raise e [ 846.701896] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 846.701896] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 846.701896] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.701896] env[61594]: created_port_ids = self._update_ports_for_instance( [ 846.701896] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.701896] env[61594]: with excutils.save_and_reraise_exception(): [ 846.701896] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.701896] env[61594]: self.force_reraise() [ 846.701896] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.701896] env[61594]: raise self.value [ 846.701896] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.701896] env[61594]: updated_port = self._update_port( [ 846.701896] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.701896] env[61594]: _ensure_no_port_binding_failure(port) [ 846.701896] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.701896] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 846.702772] env[61594]: nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. [ 846.702772] env[61594]: Removing descriptor: 23 [ 846.702772] env[61594]: ERROR nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. 
[ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Traceback (most recent call last): [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] yield resources [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.driver.spawn(context, instance, image_meta, [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 846.702772] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] vm_ref = self.build_virtual_machine(instance, [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] for vif in network_info: [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self._sync_wrapper(fn, *args, **kwargs) [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.wait() [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self[:] = self._gt.wait() [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self._exit_event.wait() [ 846.703052] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 846.703311] env[61594]: ERROR 
nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] result = hub.switch() [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self.greenlet.switch() [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] result = function(*args, **kwargs) [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return func(*args, **kwargs) [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise e [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] nwinfo = self.network_api.allocate_for_instance( [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 846.703311] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] created_port_ids = self._update_ports_for_instance( [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] with excutils.save_and_reraise_exception(): [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.force_reraise() [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise self.value [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] updated_port = self._update_port( [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 846.703571] 
env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] _ensure_no_port_binding_failure(port) [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 846.703571] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise exception.PortBindingFailed(port_id=port['id']) [ 846.703805] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. [ 846.703805] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] [ 846.703805] env[61594]: INFO nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Terminating instance [ 846.708052] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.708052] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquired lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.708052] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.708052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 1877dd73d07a4f1da9f2974f59aa86d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.720909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f9bc2de2bdf45db9a34a705d9e8e87f [ 846.722477] env[61594]: DEBUG nova.compute.utils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 846.723484] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 0122bb7eb5bc4b44bf97d53eb828487d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.724529] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1877dd73d07a4f1da9f2974f59aa86d2 [ 846.724934] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 
tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 846.741420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0122bb7eb5bc4b44bf97d53eb828487d [ 846.742148] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 846.743810] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 5b8405b27e7548a0a951defb1e8b1880 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.787358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b8405b27e7548a0a951defb1e8b1880 [ 846.790197] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg ef37299cc3ff49849135031125d8b3e8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 846.792830] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.825507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef37299cc3ff49849135031125d8b3e8 [ 846.826661] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 846.873613] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.873855] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.874139] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.874356] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.874513] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.874664] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.874875] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.875051] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.875235] env[61594]: DEBUG nova.virt.hardware [None 
req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.875405] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.875584] env[61594]: DEBUG nova.virt.hardware [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.877086] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31951058-549f-49ef-9a81-e5bec9e9d5ca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.887191] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d747041d-42fc-4df2-8139-a1ff8a0ae7aa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.907883] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.914052] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Creating folder: Project (88f0dbb8d2bc4a0d9e1c976860700368). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 846.914238] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f15e673-5497-491e-8cea-cff2a00f4397 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.931246] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Created folder: Project (88f0dbb8d2bc4a0d9e1c976860700368) in parent group-v277030. [ 846.931456] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Creating folder: Instances. Parent ref: group-v277055. 
{{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 846.931760] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc5c8939-7a77-4392-9e9b-365b31e22d63 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.948734] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Created folder: Instances in parent group-v277055. [ 846.948734] env[61594]: DEBUG oslo.service.loopingcall [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 846.948734] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 846.948734] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9b08a32-2345-4fa8-b50f-b35335af1b62 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.971851] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.971851] env[61594]: value = "task-1291422" [ 846.971851] env[61594]: _type = "Task" [ 846.971851] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.981589] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291422, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.488726] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291422, 'name': CreateVM_Task, 'duration_secs': 0.332728} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.489090] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 847.489676] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.489984] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.490463] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.490944] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ea3b420-3607-485c-89c8-019047d1bc4d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.501139] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for the task: (returnval){ [ 847.501139] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52592f4c-e90f-9074-4472-98817e2c8d38" [ 847.501139] env[61594]: _type = "Task" [ 847.501139] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.515346] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52592f4c-e90f-9074-4472-98817e2c8d38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.540082] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.540816] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 9f5b2ad77449421095784c66659bc19e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 847.557313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f5b2ad77449421095784c66659bc19e [ 847.557409] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Releasing lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.557919] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 847.558172] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 847.559021] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a461ef9f-0440-4c50-90f1-968415910693 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.573149] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511b5fd9-476a-408f-a350-7ee81313ebb6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.605021] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c342a36-b05d-452e-bbe1-fedf93e9f9d1 could not be found. 
[ 847.605021] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 847.605021] env[61594]: INFO nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Took 0.05 seconds to destroy the instance on the hypervisor. [ 847.605021] env[61594]: DEBUG oslo.service.loopingcall [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.605021] env[61594]: DEBUG nova.compute.manager [-] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 847.605253] env[61594]: DEBUG nova.network.neutron [-] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 847.659811] env[61594]: DEBUG nova.network.neutron [-] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.659811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b335f7fc19b945d892efeb91206f432e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 847.672143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b335f7fc19b945d892efeb91206f432e [ 847.672143] env[61594]: DEBUG nova.network.neutron [-] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.672143] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6bb553e5b73244acb52748156c313a5a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 847.684018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6bb553e5b73244acb52748156c313a5a [ 847.684018] env[61594]: INFO nova.compute.manager [-] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Took 0.08 seconds to deallocate network for instance. 
[ 847.684018] env[61594]: DEBUG nova.compute.claims [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 847.684018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 847.684018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.686289] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 1c5a360923194858bc023be35f809643 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 847.745025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c5a360923194858bc023be35f809643 [ 847.781107] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Successfully created port: b0b09584-4bb2-4088-8468-c5351f0d451f {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 847.887135] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4089a3f-83d8-45f1-9859-7b9097620da3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.895076] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9a92d25-dbef-42cb-be1f-c077dcefbac0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.931901] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33684e62-2d16-4668-bd82-691f294dd258 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.948109] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451a117c-3ea7-4abe-89f3-669e59e49235 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.971223] env[61594]: DEBUG nova.compute.provider_tree [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 847.971223] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None 
req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 4b1466e99d7e41c98b2d042aa4a486ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 847.980610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b1466e99d7e41c98b2d042aa4a486ef [ 847.986307] env[61594]: DEBUG nova.scheduler.client.report [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 847.990230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 01b270c1431a4f77b17a919dae33f6d1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.007985] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01b270c1431a4f77b17a919dae33f6d1 [ 848.012762] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.328s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.013415] env[61594]: ERROR nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. 
[ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Traceback (most recent call last): [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.driver.spawn(context, instance, image_meta, [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] vm_ref = self.build_virtual_machine(instance, [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] vif_infos = vmwarevif.get_vif_info(self._session, [ 848.013415] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] for vif in network_info: [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self._sync_wrapper(fn, *args, **kwargs) [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.wait() [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self[:] = self._gt.wait() [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self._exit_event.wait() [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] result = hub.switch() [ 848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
848.013725] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return self.greenlet.switch() [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] result = function(*args, **kwargs) [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] return func(*args, **kwargs) [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise e [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] nwinfo = self.network_api.allocate_for_instance( [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] created_port_ids = self._update_ports_for_instance( [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] with excutils.save_and_reraise_exception(): [ 848.014023] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] self.force_reraise() [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise self.value [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] updated_port = self._update_port( [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] _ensure_no_port_binding_failure(port) [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] raise exception.PortBindingFailed(port_id=port['id']) [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] nova.exception.PortBindingFailed: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. [ 848.014304] env[61594]: ERROR nova.compute.manager [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] [ 848.014543] env[61594]: DEBUG nova.compute.utils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 848.015873] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.016084] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.016617] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.017660] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Build of instance 1c342a36-b05d-452e-bbe1-fedf93e9f9d1 was re-scheduled: Binding failed for port fece8e69-07e9-4015-ba5e-7aa4a9fb00ef, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 848.017660] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 848.017660] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquiring lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.017882] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Acquired lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.017882] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.018360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg c42e8222d4fa42f88891df402c92527b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.027935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c42e8222d4fa42f88891df402c92527b [ 848.076235] env[61594]: DEBUG nova.compute.manager [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Received event network-changed-bd06f62e-7763-4ae9-a899-977980921581 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 848.076440] env[61594]: DEBUG nova.compute.manager [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Refreshing instance network info cache due to event network-changed-bd06f62e-7763-4ae9-a899-977980921581. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 848.076652] env[61594]: DEBUG oslo_concurrency.lockutils [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] Acquiring lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.076795] env[61594]: DEBUG oslo_concurrency.lockutils [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] Acquired lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.076995] env[61594]: DEBUG nova.network.neutron [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Refreshing network info cache for port bd06f62e-7763-4ae9-a899-977980921581 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 848.078203] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] Expecting reply to msg aae183a14eee4ff18ebd60a7765a20ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.092030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aae183a14eee4ff18ebd60a7765a20ce [ 848.106814] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.267286] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquiring lock "11943a7b-9afa-4e9c-84af-079ccc2c7cf1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.267286] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "11943a7b-9afa-4e9c-84af-079ccc2c7cf1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.267286] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 569336513dcb4e949a59acf6a8deb7a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.268751] env[61594]: DEBUG nova.network.neutron [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.282904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 569336513dcb4e949a59acf6a8deb7a5 [ 848.282904] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 848.282904] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg c96d85e801dd4f6eae048a1933ee0340 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.325436] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c96d85e801dd4f6eae048a1933ee0340 [ 848.353029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 848.353029] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 848.356222] env[61594]: INFO nova.compute.claims [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 848.357966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg d34ae17416f14c369cfd2d8bd28202d1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.400202] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d34ae17416f14c369cfd2d8bd28202d1 [ 848.402346] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg e35d6d2b0e5b4260a949793469f9197a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.422459] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e35d6d2b0e5b4260a949793469f9197a [ 848.591019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95964424-954d-405e-a08f-f862856ccfb9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.605016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f27ebf-6bc2-4245-8118-33540e359799 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.640822] 
env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c206b99a-c137-4ba2-9aaa-9fe41e7ea3c7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.648980] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8630da-7db6-415b-bade-3fef206dd0ec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.663407] env[61594]: DEBUG nova.compute.provider_tree [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.663927] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 002440bc666c493391637e9afae173b7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.673535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 002440bc666c493391637e9afae173b7 [ 848.674553] env[61594]: DEBUG nova.scheduler.client.report [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 848.676808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 389b763b38b34e9d92c7d2d48cf533cb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.689506] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 389b763b38b34e9d92c7d2d48cf533cb [ 848.690278] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.337s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.690838] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 848.693194] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 63a7186cf03e4d318dc935108aeefcc3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.735450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63a7186cf03e4d318dc935108aeefcc3 [ 848.736824] env[61594]: DEBUG nova.compute.utils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 848.737510] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 6c92906911334fdcb5e086471f74ef48 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.738408] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 848.738585] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 848.760742] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c92906911334fdcb5e086471f74ef48 [ 848.761444] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 848.763398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 95c82444c9ac4e8a8751191034064264 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.805809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95c82444c9ac4e8a8751191034064264 [ 848.808995] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 918f5a6858094579bcf8efbe31628db7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.841394] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 918f5a6858094579bcf8efbe31628db7 [ 848.842805] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 848.872842] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 848.872842] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 848.872842] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 848.873559] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 848.873559] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 848.873559] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 848.873559] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 848.873559] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 848.874179] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 
tempest-ServersTestJSON-1540490964-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 848.874179] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 848.874179] env[61594]: DEBUG nova.virt.hardware [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 848.874833] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5c6975-b32d-48b9-8f91-86c288e603c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.884317] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d87c83c-f50f-4058-b0c7-681049d0271c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.899498] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.899638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 3e37ee41397e43a0b09ed88a6aeb31c1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 848.918099] env[61594]: ERROR nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. 
[ 848.918099] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 848.918099] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.918099] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.918099] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.918099] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.918099] env[61594]: ERROR nova.compute.manager raise self.value [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.918099] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 848.918099] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.918099] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 848.918465] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.918465] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 848.918465] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. 
[ 848.918465] env[61594]: ERROR nova.compute.manager [ 848.918465] env[61594]: Traceback (most recent call last): [ 848.918465] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 848.918465] env[61594]: listener.cb(fileno) [ 848.918465] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 848.918465] env[61594]: result = function(*args, **kwargs) [ 848.918465] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 848.918465] env[61594]: return func(*args, **kwargs) [ 848.918465] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 848.918465] env[61594]: raise e [ 848.918465] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 848.918465] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 848.918465] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.918465] env[61594]: created_port_ids = self._update_ports_for_instance( [ 848.918465] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.918465] env[61594]: with excutils.save_and_reraise_exception(): [ 848.918465] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.918465] env[61594]: self.force_reraise() [ 848.918465] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.918465] env[61594]: raise self.value [ 848.918465] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.918465] env[61594]: updated_port = self._update_port( [ 848.918465] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.918465] env[61594]: _ensure_no_port_binding_failure(port) [ 848.918465] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.918465] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 848.919057] env[61594]: nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. [ 848.919057] env[61594]: Removing descriptor: 24 [ 848.919057] env[61594]: ERROR nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. 
[ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Traceback (most recent call last): [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] yield resources [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.driver.spawn(context, instance, image_meta, [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 848.919057] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] vm_ref = self.build_virtual_machine(instance, [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] vif_infos = vmwarevif.get_vif_info(self._session, [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] for vif in network_info: [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self._sync_wrapper(fn, *args, **kwargs) [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.wait() [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self[:] = self._gt.wait() [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self._exit_event.wait() [ 848.919470] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 848.919734] env[61594]: ERROR 
nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] result = hub.switch() [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self.greenlet.switch() [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] result = function(*args, **kwargs) [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return func(*args, **kwargs) [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise e [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] nwinfo = self.network_api.allocate_for_instance( [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 848.919734] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] created_port_ids = self._update_ports_for_instance( [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] with excutils.save_and_reraise_exception(): [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.force_reraise() [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise self.value [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] updated_port = self._update_port( [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 848.920038] 
env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] _ensure_no_port_binding_failure(port) [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 848.920038] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise exception.PortBindingFailed(port_id=port['id']) [ 848.920425] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. [ 848.920425] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] [ 848.920425] env[61594]: INFO nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Terminating instance [ 848.921449] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e37ee41397e43a0b09ed88a6aeb31c1 [ 848.922039] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Releasing lock "refresh_cache-1c342a36-b05d-452e-bbe1-fedf93e9f9d1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.922258] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 848.922423] env[61594]: DEBUG nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 848.922627] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 848.924775] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.993874] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 848.994637] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg e9a88aaa97074bef9d0c6f9da97ec2a6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.016414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9a88aaa97074bef9d0c6f9da97ec2a6 [ 849.016414] env[61594]: DEBUG nova.network.neutron [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.016414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 3e0661dcbd6148cfa787155ff1143390 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.028442] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e0661dcbd6148cfa787155ff1143390 [ 849.028442] env[61594]: INFO nova.compute.manager [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] [instance: 1c342a36-b05d-452e-bbe1-fedf93e9f9d1] Took 0.11 seconds to deallocate network for instance. [ 849.031275] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 460ce10af41d4840b9dbb905367fff1b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.050390] env[61594]: DEBUG nova.policy [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9c1d1cb0f2d94cebb81e5e7b8f751827', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e7e2429ac7b9497d86ed5fedc91c0dec', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 849.054089] env[61594]: DEBUG nova.network.neutron [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.055035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] Expecting reply to msg b6cfd593b0c542999ab0fccc146fc055 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.066345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6cfd593b0c542999ab0fccc146fc055 [ 849.067233] env[61594]: DEBUG oslo_concurrency.lockutils [req-7f4316a3-b721-436d-84a4-4fefb4ed49f0 
req-b463c8bc-2b0a-4699-a121-bd271abb6d44 service nova] Releasing lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.067728] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.068042] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 849.068609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 290c35d472164027877d8a7f39abba0e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.089760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 290c35d472164027877d8a7f39abba0e [ 849.095927] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 460ce10af41d4840b9dbb905367fff1b [ 849.100966] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg 3344b987966442699a69deb5a7ff1f16 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.143572] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3344b987966442699a69deb5a7ff1f16 [ 849.161190] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.180281] env[61594]: INFO nova.scheduler.client.report [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Deleted allocations for instance 1c342a36-b05d-452e-bbe1-fedf93e9f9d1 [ 849.190200] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Expecting reply to msg cbf7c482ca384c2f81bd9f5777499abf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.211797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cbf7c482ca384c2f81bd9f5777499abf [ 849.212499] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a27468d7-faa8-4a7c-9b09-7d2b4445245d tempest-AttachVolumeTestJSON-246792021 tempest-AttachVolumeTestJSON-246792021-project-member] Lock "1c342a36-b05d-452e-bbe1-fedf93e9f9d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.510s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.613747] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.614335] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 12615777e95043df90952dbc7a35098d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.628990] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12615777e95043df90952dbc7a35098d [ 849.629712] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.630289] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 849.630881] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 849.631056] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eb5a255a-8fc5-45a1-a231-51ed18594b17 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.641917] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4232da3-01f8-45fa-89f3-8353d187dcae {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.673569] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b21f2e22-a6d6-4ab5-baa7-110004aa776e could not be found. [ 849.673569] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 849.673569] env[61594]: INFO nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 849.673569] env[61594]: DEBUG oslo.service.loopingcall [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 849.673809] env[61594]: DEBUG nova.compute.manager [-] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 849.673809] env[61594]: DEBUG nova.network.neutron [-] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 849.945213] env[61594]: DEBUG nova.network.neutron [-] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.946512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 6a4b3b97e95a4936bb0799ab9cfeaa6d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.956501] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6a4b3b97e95a4936bb0799ab9cfeaa6d [ 849.957217] env[61594]: DEBUG nova.network.neutron [-] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.957217] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7d3cf357fed541e8856fdd7c7f4055d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 849.970909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7d3cf357fed541e8856fdd7c7f4055d8 [ 849.971477] env[61594]: INFO nova.compute.manager [-] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Took 0.30 seconds to deallocate network for instance. [ 849.975948] env[61594]: DEBUG nova.compute.claims [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 849.976197] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.976428] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.978297] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2033af69dbac4256866ae8a8f61b9d59 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 850.023721] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2033af69dbac4256866ae8a8f61b9d59 [ 850.176831] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-983a0331-5e61-4dce-b75e-6ceb226d93f3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.188165] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acde0da0-8c32-45d5-88d1-e5b4d6e34faf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.224020] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8ccc75-1cfa-4d41-8edf-9bb34435518a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
850.232405] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317dd89e-e240-4f66-90c6-cee73ee8d1a7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.247583] env[61594]: DEBUG nova.compute.provider_tree [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.248041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg e19112d27eaf43518cdbdebd6a5fce18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 850.258659] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e19112d27eaf43518cdbdebd6a5fce18 [ 850.259834] env[61594]: DEBUG nova.scheduler.client.report [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 850.262304] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 44affda6d8094457994996c0135cc083 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 850.281323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44affda6d8094457994996c0135cc083 [ 850.282409] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 850.283071] env[61594]: ERROR nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. 
[ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Traceback (most recent call last): [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.driver.spawn(context, instance, image_meta, [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] vm_ref = self.build_virtual_machine(instance, [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] vif_infos = vmwarevif.get_vif_info(self._session, [ 850.283071] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] for vif in network_info: [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self._sync_wrapper(fn, *args, **kwargs) [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.wait() [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self[:] = self._gt.wait() [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self._exit_event.wait() [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] result = hub.switch() [ 850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
850.283446] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return self.greenlet.switch() [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] result = function(*args, **kwargs) [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] return func(*args, **kwargs) [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise e [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] nwinfo = self.network_api.allocate_for_instance( [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] created_port_ids = self._update_ports_for_instance( [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] with excutils.save_and_reraise_exception(): [ 850.283828] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] self.force_reraise() [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise self.value [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] updated_port = self._update_port( [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] _ensure_no_port_binding_failure(port) [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] raise exception.PortBindingFailed(port_id=port['id']) [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] nova.exception.PortBindingFailed: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. [ 850.284254] env[61594]: ERROR nova.compute.manager [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] [ 850.284951] env[61594]: DEBUG nova.compute.utils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 850.285830] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Build of instance b21f2e22-a6d6-4ab5-baa7-110004aa776e was re-scheduled: Binding failed for port bd06f62e-7763-4ae9-a899-977980921581, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 850.286156] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 850.286427] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.286609] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.286806] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 850.287249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 00dfad78064e4da0b0f2b49e59be9925 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 850.298563] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00dfad78064e4da0b0f2b49e59be9925 [ 850.376015] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 
tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.547228] env[61594]: DEBUG nova.compute.manager [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Received event network-changed-bb82d309-55b2-42a9-9769-0594e64c862b {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 850.547228] env[61594]: DEBUG nova.compute.manager [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Refreshing instance network info cache due to event network-changed-bb82d309-55b2-42a9-9769-0594e64c862b. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 850.547228] env[61594]: DEBUG oslo_concurrency.lockutils [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] Acquiring lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.547228] env[61594]: DEBUG oslo_concurrency.lockutils [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] Acquired lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.547228] env[61594]: DEBUG nova.network.neutron [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Refreshing network info cache for port bb82d309-55b2-42a9-9769-0594e64c862b {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 850.549323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] Expecting reply to msg 01e2a7a698094c6c90acc9fbb23c2180 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 850.560271] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01e2a7a698094c6c90acc9fbb23c2180 [ 850.757809] env[61594]: DEBUG nova.network.neutron [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 850.945261] env[61594]: ERROR nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. 
[ 850.945261] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 850.945261] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.945261] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.945261] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.945261] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.945261] env[61594]: ERROR nova.compute.manager raise self.value [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.945261] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 850.945261] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.945261] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 850.945763] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.945763] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 850.945763] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. 
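Both tracebacks in this run end in the same place: `_ensure_no_port_binding_failure` at nova/network/neutron.py:294, which turns a Neutron port whose binding failed into `nova.exception.PortBindingFailed`. As a reading aid, here is a minimal sketch of that kind of check, assuming the Neutron port dict carries `binding:vif_type` and reports a failed binding as the literal value `'binding_failed'`; the exception class, message wording, and example port id below are modeled on the log, not copied from the deployed source.

```python
# Illustrative sketch only -- not the deployed Nova source. Assumes the
# Neutron port dict exposes 'binding:vif_type' and that a failed binding
# is reported as the literal value 'binding_failed'.


class PortBindingFailed(Exception):
    """Mirrors the message format seen in the log above."""

    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")
        self.port_id = port_id


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: a port that Neutron could not bind on the target host.
port = {'id': 'bd06f62e-7763-4ae9-a899-977980921581',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)  # Binding failed for port bd06f62e-..., please check ...
```

In this run the failure repeats for every port the tests create, which usually points at the network backend (no agent or mechanism driver able to bind the port on the compute host) rather than at the individual instances.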
[ 850.945763] env[61594]: ERROR nova.compute.manager [ 850.945763] env[61594]: Traceback (most recent call last): [ 850.945763] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 850.945763] env[61594]: listener.cb(fileno) [ 850.945763] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 850.945763] env[61594]: result = function(*args, **kwargs) [ 850.945763] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 850.945763] env[61594]: return func(*args, **kwargs) [ 850.945763] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 850.945763] env[61594]: raise e [ 850.945763] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 850.945763] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 850.945763] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.945763] env[61594]: created_port_ids = self._update_ports_for_instance( [ 850.945763] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.945763] env[61594]: with excutils.save_and_reraise_exception(): [ 850.945763] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.945763] env[61594]: self.force_reraise() [ 850.945763] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.945763] env[61594]: raise self.value [ 850.945763] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.945763] env[61594]: updated_port = self._update_port( [ 850.945763] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.945763] env[61594]: _ensure_no_port_binding_failure(port) [ 850.945763] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.945763] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 850.946478] env[61594]: nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. [ 850.946478] env[61594]: Removing descriptor: 20 [ 850.946478] env[61594]: ERROR nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. 
[ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Traceback (most recent call last): [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] yield resources [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.driver.spawn(context, instance, image_meta, [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 850.946478] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] vm_ref = self.build_virtual_machine(instance, [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] vif_infos = vmwarevif.get_vif_info(self._session, [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] for vif in network_info: [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self._sync_wrapper(fn, *args, **kwargs) [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.wait() [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self[:] = self._gt.wait() [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self._exit_event.wait() [ 850.946759] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 850.947115] env[61594]: ERROR 
nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] result = hub.switch() [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self.greenlet.switch() [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] result = function(*args, **kwargs) [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return func(*args, **kwargs) [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise e [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] nwinfo = self.network_api.allocate_for_instance( [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 850.947115] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] created_port_ids = self._update_ports_for_instance( [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] with excutils.save_and_reraise_exception(): [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.force_reraise() [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise self.value [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] updated_port = self._update_port( [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 850.947506] 
env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] _ensure_no_port_binding_failure(port) [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 850.947506] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise exception.PortBindingFailed(port_id=port['id']) [ 850.947817] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. [ 850.947817] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] [ 850.947817] env[61594]: INFO nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Terminating instance [ 850.951806] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.986977] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquiring lock "b56a9692-3745-4513-879c-4298716c5e81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 850.986977] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "b56a9692-3745-4513-879c-4298716c5e81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 850.986977] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg c2de4f64180a413e9b923ee0f6a2fed8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.008692] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c2de4f64180a413e9b923ee0f6a2fed8 [ 851.009788] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 851.011554] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 2a7c3a2b38c0445eb86457d722747619 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.026237] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Successfully created port: 82010836-c561-4ba9-bf30-048d661c0450 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.063779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2a7c3a2b38c0445eb86457d722747619 [ 851.092110] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.092255] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.094591] env[61594]: INFO nova.compute.claims [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 851.096687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 3b5cb2a8a3884d3ea8db44bc50492238 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.128109] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.128109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 403260ac2c244da4b4783700290a7f25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.138642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3b5cb2a8a3884d3ea8db44bc50492238 [ 851.140481] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 
e4224efbc2da4285b9a6284e1521abea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.149322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 403260ac2c244da4b4783700290a7f25 [ 851.149322] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-b21f2e22-a6d6-4ab5-baa7-110004aa776e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.149322] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 851.149670] env[61594]: DEBUG nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 851.149670] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 851.153784] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4224efbc2da4285b9a6284e1521abea [ 851.241214] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.241447] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 04bd8f920b364d85af284d77af16c35f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.252371] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04bd8f920b364d85af284d77af16c35f [ 851.252759] env[61594]: DEBUG nova.network.neutron [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.253158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d520412940a14d92bfaf1a51567edd8e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.268708] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d520412940a14d92bfaf1a51567edd8e [ 851.269742] env[61594]: INFO nova.compute.manager [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: b21f2e22-a6d6-4ab5-baa7-110004aa776e] Took 0.12 seconds to deallocate network for instance. [ 851.271358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg b4df8c2f9c53432a8e3751d5f2e2ca4d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.325235] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4df8c2f9c53432a8e3751d5f2e2ca4d [ 851.329962] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg aef5491c5363497bb1807a55e323a046 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.372544] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ead22-5c45-4e1f-bd22-e665a8048ce4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.383533] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f2ee17-4b08-42f9-993e-30888a515213 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.391626] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aef5491c5363497bb1807a55e323a046 [ 851.428662] env[61594]: INFO nova.scheduler.client.report [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance b21f2e22-a6d6-4ab5-baa7-110004aa776e [ 851.434423] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 
tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 745cd2ed8e35479a90758385811fb7ee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.436486] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c038ceab-24b5-4721-be15-921932b73315 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.445342] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7e103b-b744-480a-9b6b-c75b0aafe769 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.463521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 745cd2ed8e35479a90758385811fb7ee [ 851.463521] env[61594]: DEBUG nova.compute.provider_tree [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.463521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 2d59c0bc9ede42178f4f696a8839f063 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.463521] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0ac45a8d-bf06-4806-ba46-b01bb5244a37 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "b21f2e22-a6d6-4ab5-baa7-110004aa776e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.711s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.478593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2d59c0bc9ede42178f4f696a8839f063 [ 851.481234] env[61594]: DEBUG nova.scheduler.client.report [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 851.484657] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 1ea56074134348c99e914333a9ddfba0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.512847] env[61594]: DEBUG nova.network.neutron [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Updating instance_info_cache with network_info: [] {{(pid=61594) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.512847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] Expecting reply to msg aaa3401e26e541909ce59012327961a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.516310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ea56074134348c99e914333a9ddfba0 [ 851.516899] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.425s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.517390] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 851.520115] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg a0fa913ec0ab46778f7d7fd96e6f9522 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.530159] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aaa3401e26e541909ce59012327961a9 [ 851.531399] env[61594]: DEBUG oslo_concurrency.lockutils [req-f747afa5-7b01-4355-a4bb-50cece0f85ee req-6627d10e-bfbe-435d-8291-0be22c0bf8d0 service nova] Releasing lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.531399] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquired lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.531524] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.531948] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 012049371a294b87aee7498a135f0b91 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.544375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 012049371a294b87aee7498a135f0b91 [ 851.569017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a0fa913ec0ab46778f7d7fd96e6f9522 [ 851.571539] env[61594]: DEBUG nova.compute.utils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 
tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 851.572421] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 91159d71859d45d89f3c65d532c12a0b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.573649] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 851.573846] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 851.585368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 91159d71859d45d89f3c65d532c12a0b [ 851.587278] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 851.592116] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 5e61bd95772641dfb79cb895806871c1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.612595] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.644336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e61bd95772641dfb79cb895806871c1 [ 851.647386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 5ee0817c27a749329d597882c651f7c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 851.692817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ee0817c27a749329d597882c651f7c6 [ 851.694353] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 851.738934] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.739255] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.739462] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.739663] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 851.739849] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.742399] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.742399] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.742399] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 851.742399] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.742933] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.742933] env[61594]: DEBUG nova.virt.hardware [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.744604] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cb655a-602c-42c2-b223-cc299c217815 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.755974] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b09b1d-d71d-4870-9070-6e63fffdd53d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.809861] env[61594]: DEBUG nova.policy [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62706b3903c046f1b262416bcf373961', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8282d1df83f045fc9c91f25895b631de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 852.075947] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.076516] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg d22b4ed70cf244a49ea42c14ee66f9af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.085992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d22b4ed70cf244a49ea42c14ee66f9af [ 852.086648] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Releasing lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.087059] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 852.087254] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 852.087775] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b439a8e-3d28-4219-84fb-15a63b6dd812 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.099015] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fca2eea-19d1-4f91-bfb5-85a948d613a4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.128713] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5991598e-20cd-475d-bbdc-83cd4909a31e could not be found. [ 852.128908] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 852.129110] env[61594]: INFO nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 852.129363] env[61594]: DEBUG oslo.service.loopingcall [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 852.129600] env[61594]: DEBUG nova.compute.manager [-] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 852.129709] env[61594]: DEBUG nova.network.neutron [-] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 852.191112] env[61594]: DEBUG nova.network.neutron [-] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.191784] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e18509423d01485aa514a625b274bec0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.200812] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e18509423d01485aa514a625b274bec0 [ 852.201302] env[61594]: DEBUG nova.network.neutron [-] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.202109] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 440e7f4f06264afa8cd7fad9a0c36c25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.216043] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 440e7f4f06264afa8cd7fad9a0c36c25 [ 852.216043] env[61594]: INFO nova.compute.manager [-] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Took 0.09 seconds to deallocate network for instance. [ 852.218307] env[61594]: DEBUG nova.compute.claims [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 852.218531] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 852.218751] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 852.224021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 72d048c0de9c46098ccb9497f544f69e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.266778] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72d048c0de9c46098ccb9497f544f69e [ 852.417513] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd1793e-cf36-4894-a47d-facc831611e3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.427756] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215c583b-06c4-4b23-8a54-60e2e2338e6f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.460139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1b9359-fdb2-488e-8cd6-7f1187c57472 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.468841] env[61594]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ecc7d1-f161-47cb-9537-391148d33dec {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.486417] env[61594]: DEBUG nova.compute.provider_tree [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.487126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 51060032276a418c8313d699a3b96160 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.499681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51060032276a418c8313d699a3b96160 [ 852.500954] env[61594]: DEBUG nova.scheduler.client.report [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 852.503577] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 60de83fa7a6048de81a83a0c1daeda11 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.524280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 60de83fa7a6048de81a83a0c1daeda11 [ 852.525262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.306s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 852.525900] env[61594]: ERROR nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. 
[ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Traceback (most recent call last): [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.driver.spawn(context, instance, image_meta, [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] vm_ref = self.build_virtual_machine(instance, [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] vif_infos = vmwarevif.get_vif_info(self._session, [ 852.525900] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] for vif in network_info: [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self._sync_wrapper(fn, *args, **kwargs) [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.wait() [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self[:] = self._gt.wait() [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self._exit_event.wait() [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] result = hub.switch() [ 852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
852.526201] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return self.greenlet.switch() [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] result = function(*args, **kwargs) [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] return func(*args, **kwargs) [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise e [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] nwinfo = self.network_api.allocate_for_instance( [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] created_port_ids = self._update_ports_for_instance( [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] with excutils.save_and_reraise_exception(): [ 852.526472] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] self.force_reraise() [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise self.value [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] updated_port = self._update_port( [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] _ensure_no_port_binding_failure(port) [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] raise exception.PortBindingFailed(port_id=port['id']) [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] nova.exception.PortBindingFailed: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. [ 852.526737] env[61594]: ERROR nova.compute.manager [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] [ 852.526968] env[61594]: DEBUG nova.compute.utils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 852.528608] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Build of instance 5991598e-20cd-475d-bbdc-83cd4909a31e was re-scheduled: Binding failed for port bb82d309-55b2-42a9-9769-0594e64c862b, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 852.529113] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 852.529411] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquiring lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.529621] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Acquired lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.529849] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.530338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg be7847996df0487787a8dc9e989df68c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 852.543700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be7847996df0487787a8dc9e989df68c [ 852.760068] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 
tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.266138] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.270359] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 84421cc840a64fc199d1000aba1a4306 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.279281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 84421cc840a64fc199d1000aba1a4306 [ 853.279965] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Releasing lock "refresh_cache-5991598e-20cd-475d-bbdc-83cd4909a31e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.280184] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 853.280373] env[61594]: DEBUG nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 853.280580] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 853.354289] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.354289] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg c3cbc717d1624ff988f094f9efc59528 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.400963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3cbc717d1624ff988f094f9efc59528 [ 853.400963] env[61594]: DEBUG nova.network.neutron [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.401334] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg f68d57f2dd6d4de0b8cba9352c120ad0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.416383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f68d57f2dd6d4de0b8cba9352c120ad0 [ 853.418598] env[61594]: INFO nova.compute.manager [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] [instance: 5991598e-20cd-475d-bbdc-83cd4909a31e] Took 0.14 seconds to deallocate network for instance. [ 853.420264] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg bc2714edc39a4f67b446d7b3c3cf6007 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.434557] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Successfully created port: 9cc9c772-a915-4d6d-8e40-aa6981387e9b {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.487956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc2714edc39a4f67b446d7b3c3cf6007 [ 853.491895] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 0602d44f3c6a45ed88a7bf5b3a5eb024 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.532414] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0602d44f3c6a45ed88a7bf5b3a5eb024 [ 853.566583] env[61594]: INFO nova.scheduler.client.report [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Deleted allocations for instance 5991598e-20cd-475d-bbdc-83cd4909a31e [ 853.576323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Expecting reply to msg 01ce7929953d428585bb7223bb218cec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.605206] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01ce7929953d428585bb7223bb218cec [ 853.605943] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ed842645-5af2-49f1-9a07-bccc4a87cac5 tempest-ServerGroupTestJSON-928422351 tempest-ServerGroupTestJSON-928422351-project-member] Lock "5991598e-20cd-475d-bbdc-83cd4909a31e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.354s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.809870] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquiring lock "b89e3f53-d9cd-400a-8ba9-83e328a59de8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.810110] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "b89e3f53-d9cd-400a-8ba9-83e328a59de8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.810597] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 16e39e42581b4cefbfef21997294c3a4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.823243] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 16e39e42581b4cefbfef21997294c3a4 [ 853.823728] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Starting instance... 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 853.825542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 15cf543cd0ca46e0b0f8c221fd62036a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.864513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15cf543cd0ca46e0b0f8c221fd62036a [ 853.891653] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 853.891886] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.893529] env[61594]: INFO nova.compute.claims [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.895277] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 5195de808e8a4b9eb6e4ad1b81c617b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.897383] env[61594]: ERROR nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. 
[ 853.897383] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 853.897383] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.897383] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.897383] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.897383] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.897383] env[61594]: ERROR nova.compute.manager raise self.value [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.897383] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 853.897383] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.897383] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 853.897778] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.897778] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 853.897778] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. 
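The traceback above bottoms out in nova/network/neutron.py:_ensure_no_port_binding_failure, which is what turns a port whose binding Neutron could not complete into the PortBindingFailed raised here. A minimal sketch of that check, assuming a Neutron port dict of the usual shape (an illustration of the failure condition, not the verbatim Nova source):

    # Illustrative sketch: the condition behind the PortBindingFailed records above.
    # A port whose binding could not be completed comes back from Neutron with
    # binding:vif_type set to 'binding_failed'; Nova refuses to proceed with it.
    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__('Binding failed for port %s, please check neutron '
                             'logs for more information.' % port_id)

    def ensure_no_port_binding_failure(port):
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({
            'id': 'b0b09584-4bb2-4088-8468-c5351f0d451f',
            'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # same message as the ERROR records above

The remaining records in this dump are the same exception surfacing again at the eventlet/greenthread level and then in the per-instance spawn path.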
[ 853.897778] env[61594]: ERROR nova.compute.manager [ 853.897778] env[61594]: Traceback (most recent call last): [ 853.897778] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 853.897778] env[61594]: listener.cb(fileno) [ 853.897778] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 853.897778] env[61594]: result = function(*args, **kwargs) [ 853.897778] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.897778] env[61594]: return func(*args, **kwargs) [ 853.897778] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 853.897778] env[61594]: raise e [ 853.897778] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 853.897778] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 853.897778] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.897778] env[61594]: created_port_ids = self._update_ports_for_instance( [ 853.897778] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.897778] env[61594]: with excutils.save_and_reraise_exception(): [ 853.897778] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.897778] env[61594]: self.force_reraise() [ 853.897778] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.897778] env[61594]: raise self.value [ 853.897778] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.897778] env[61594]: updated_port = self._update_port( [ 853.897778] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.897778] env[61594]: _ensure_no_port_binding_failure(port) [ 853.897778] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.897778] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 853.898470] env[61594]: nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. [ 853.898470] env[61594]: Removing descriptor: 21 [ 853.898470] env[61594]: ERROR nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. 
[ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Traceback (most recent call last): [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] yield resources [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.driver.spawn(context, instance, image_meta, [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.898470] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] vm_ref = self.build_virtual_machine(instance, [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] for vif in network_info: [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self._sync_wrapper(fn, *args, **kwargs) [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.wait() [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self[:] = self._gt.wait() [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self._exit_event.wait() [ 853.898747] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 853.899041] env[61594]: ERROR 
nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] result = hub.switch() [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self.greenlet.switch() [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] result = function(*args, **kwargs) [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return func(*args, **kwargs) [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise e [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] nwinfo = self.network_api.allocate_for_instance( [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.899041] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] created_port_ids = self._update_ports_for_instance( [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] with excutils.save_and_reraise_exception(): [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.force_reraise() [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise self.value [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] updated_port = self._update_port( [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.899332] 
env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] _ensure_no_port_binding_failure(port) [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.899332] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise exception.PortBindingFailed(port_id=port['id']) [ 853.899615] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. [ 853.899615] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] [ 853.899615] env[61594]: INFO nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Terminating instance [ 853.900794] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.900964] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.901158] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.901548] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 0723d241a2604c61b1725bc22be64320 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.908207] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0723d241a2604c61b1725bc22be64320 [ 853.934034] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.939411] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5195de808e8a4b9eb6e4ad1b81c617b3 [ 853.941911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 2bf3a0ca071943669fc530b39ece8ce3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 853.952622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bf3a0ca071943669fc530b39ece8ce3 [ 854.105843] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f914c71-9221-43f5-ad61-6a160fb4c3e7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.114604] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5109a45-1b9a-4006-8649-1b699259b0d8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.150086] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d70e93-0cfe-4916-a6fe-dd4d5499fcba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.158684] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0833df-d63c-4b07-9357-46bf8081dea6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.173984] env[61594]: DEBUG nova.compute.provider_tree [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.174504] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg d020beeab81b4f40b158cdcecb94b7e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.188538] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d020beeab81b4f40b158cdcecb94b7e1 [ 854.189561] env[61594]: DEBUG nova.scheduler.client.report [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 854.191913] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 
tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 998c4fcb28024bcc9d88ec119610e2da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.208280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 998c4fcb28024bcc9d88ec119610e2da [ 854.209892] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.317s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.209892] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 854.211377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 81b3aba75c674210a7c7f4c0f39d22ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.254052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 81b3aba75c674210a7c7f4c0f39d22ea [ 854.255020] env[61594]: DEBUG nova.compute.utils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 854.255621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 6018ef0df79740cea2aac0618dd41595 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.256486] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 854.256662] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 854.268098] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6018ef0df79740cea2aac0618dd41595 [ 854.268894] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 854.270686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 8878f1d8b5fb4149a6fe33cfe634ebc6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.302943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8878f1d8b5fb4149a6fe33cfe634ebc6 [ 854.304508] env[61594]: INFO nova.virt.block_device [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Booting with volume 99ac65c2-3880-4653-9c73-253afc76a2f5 at /dev/sda [ 854.347197] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-391edc62-4b5a-4baa-b5d8-41cd46edb2ad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.357141] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7801f72-cbb4-462c-81c7-83c990c3b78f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.385316] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-592c4e2f-f821-410b-b1be-c4b9b36fbbf3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.395310] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b537ef9e-df24-4a95-a13e-aa899b9aea68 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.428133] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414a2cdf-43ab-42a3-b1e3-2654a2bc26be {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.439311] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968fddf1-b519-40fa-88da-b4a6c423bc59 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.449624] env[61594]: DEBUG nova.policy [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d301f3e5574407e9b13ce81a785466c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a6790faf73648b89498c2305f6aea85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 854.456564] env[61594]: DEBUG nova.virt.block_device [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating existing volume attachment record: d2bd16d1-cf71-459c-9e40-5e5598e9c8f7 
{{(pid=61594) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 854.470021] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.470694] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 8472e38164504fabb018d914e5745ed2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.484044] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8472e38164504fabb018d914e5745ed2 [ 854.484682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.485499] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 854.485730] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 854.487063] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e125b1c3-a17e-4e7d-adbe-29569f253125 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.498540] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648adc84-54f9-45ff-921e-1625bcb2ac7a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.528449] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88a6495c-b9b7-4822-9cae-f27594545b3e could not be found. [ 854.528769] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.529109] env[61594]: INFO nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Took 0.04 seconds to destroy the instance on the hypervisor. 
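The SearchIndex.FindAllByUuid lookup, the "Instance does not exist on backend" warning, and the immediate "Instance destroyed" above show the VMware destroy path treating a never-created VM as already gone. A minimal sketch of that best-effort pattern, with lookup_vm_ref standing in for the vCenter UUID search (the helper name is hypothetical and only illustrates the control flow):

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger('vmops-sketch')

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def lookup_vm_ref(instance_uuid):
        # Illustrative stand-in for the SearchIndex.FindAllByUuid lookup; in this
        # run the VM was never created on the backend, so nothing is found.
        raise InstanceNotFound(instance_uuid)

    def destroy_instance(instance_uuid):
        """Best-effort destroy: a VM missing on the backend is not an error."""
        try:
            vm_ref = lookup_vm_ref(instance_uuid)
            # ...power off and unregister the VM via vm_ref here...
        except InstanceNotFound:
            # Swallow the error so network deallocation and the resource-claim
            # cleanup that follow in the log can still run.
            LOG.warning('Instance does not exist on backend: %s', instance_uuid)
        LOG.debug('Instance destroyed')

    destroy_instance('88a6495c-b9b7-4822-9cae-f27594545b3e')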
[ 854.529359] env[61594]: DEBUG oslo.service.loopingcall [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 854.529990] env[61594]: DEBUG nova.compute.manager [-] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 854.530139] env[61594]: DEBUG nova.network.neutron [-] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.564612] env[61594]: DEBUG nova.network.neutron [-] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.565560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg ce20f3a8cac94f2d812399937e855def in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.575978] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ce20f3a8cac94f2d812399937e855def [ 854.576821] env[61594]: DEBUG nova.network.neutron [-] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.577037] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3056c981cc99465bb3caa355c0f154a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.590873] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3056c981cc99465bb3caa355c0f154a9 [ 854.591127] env[61594]: INFO nova.compute.manager [-] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Took 0.06 seconds to deallocate network for instance. 
[ 854.594019] env[61594]: DEBUG nova.compute.claims [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 854.594019] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 854.594131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 854.596096] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 853494d33e0246a08cfe12165e81c6be in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.609029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 878a24fdc4a44beaa27715860251ec51 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.621932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 878a24fdc4a44beaa27715860251ec51 [ 854.643042] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 853494d33e0246a08cfe12165e81c6be [ 854.672601] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 086768e52fe54517844a6b9a7f6f31f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.686944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 086768e52fe54517844a6b9a7f6f31f4 [ 854.687176] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg b3cf2e7543e948ccaf95b06b8ad10b00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.734877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3cf2e7543e948ccaf95b06b8ad10b00 [ 854.735781] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 854.736479] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 854.737126] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 854.737723] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.738120] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 854.738274] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.738542] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 854.739112] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 854.739210] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 854.739840] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Got 1 possible 
topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 854.739840] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 854.739840] env[61594]: DEBUG nova.virt.hardware [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 854.744424] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad8a206-5b33-4203-af72-72663a5b24ba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.757247] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be912e5c-1b83-43c1-8155-59716be4c572 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.843557] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1256fe9b-df29-4bdc-8da3-ba4c7d5fb3cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.852464] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f86d64-6408-42b8-b2bb-b6b4b9856ebd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.890502] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222eefe5-44f1-40ab-b995-f2416ce6349d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.900076] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afa165f-1de8-4aee-99c4-7ba620d30c98 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.915660] env[61594]: DEBUG nova.compute.provider_tree [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.916192] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg aad527d48e424554acda5bcc65b2542f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.925031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg aad527d48e424554acda5bcc65b2542f [ 854.926192] env[61594]: DEBUG nova.scheduler.client.report [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 854.928869] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 0e486f17496d4e3e85fc7c227449323b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.943795] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e486f17496d4e3e85fc7c227449323b [ 854.944748] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.351s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 854.945432] env[61594]: ERROR nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Traceback (most recent call last): [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.driver.spawn(context, instance, image_meta, [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] vm_ref = self.build_virtual_machine(instance, [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] vif_infos = vmwarevif.get_vif_info(self._session, [ 854.945432] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] for vif in network_info: [ 854.945711] env[61594]: ERROR 
nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self._sync_wrapper(fn, *args, **kwargs) [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.wait() [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self[:] = self._gt.wait() [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self._exit_event.wait() [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] result = hub.switch() [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 854.945711] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return self.greenlet.switch() [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] result = function(*args, **kwargs) [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] return func(*args, **kwargs) [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise e [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] nwinfo = self.network_api.allocate_for_instance( [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] created_port_ids = self._update_ports_for_instance( [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 
88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] with excutils.save_and_reraise_exception(): [ 854.946076] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] self.force_reraise() [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise self.value [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] updated_port = self._update_port( [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] _ensure_no_port_binding_failure(port) [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] raise exception.PortBindingFailed(port_id=port['id']) [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] nova.exception.PortBindingFailed: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. [ 854.946473] env[61594]: ERROR nova.compute.manager [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] [ 854.946774] env[61594]: DEBUG nova.compute.utils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 854.948137] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Build of instance 88a6495c-b9b7-4822-9cae-f27594545b3e was re-scheduled: Binding failed for port b0b09584-4bb2-4088-8468-c5351f0d451f, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 854.948661] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 854.948986] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquiring lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.949120] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Acquired lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.949518] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 854.949762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg ec6f9a799e904d918578405bec3f53af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 854.961288] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec6f9a799e904d918578405bec3f53af [ 855.014379] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.256724] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "ead7b401-1fd8-49b8-958a-2537714f22ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.256981] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "ead7b401-1fd8-49b8-958a-2537714f22ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.257481] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 19feb584934f4337bb693444403c7a73 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.273452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19feb584934f4337bb693444403c7a73 [ 855.273987] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 855.276165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ebbfaa5670da4e43be228ba631439692 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.323344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebbfaa5670da4e43be228ba631439692 [ 855.345929] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.346177] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.347749] env[61594]: INFO nova.compute.claims [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.349321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 
tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg fcf7d6c1b5544e0a9be42bf6ac6186d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.384943] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.385612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg eefb1bec6d6d4cb68978588fff8a1301 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.391578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fcf7d6c1b5544e0a9be42bf6ac6186d7 [ 855.393865] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 283711f2a8bc41bcb0ac627adc3c3c4d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.395361] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg eefb1bec6d6d4cb68978588fff8a1301 [ 855.395433] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Releasing lock "refresh_cache-88a6495c-b9b7-4822-9cae-f27594545b3e" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.395583] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 855.395758] env[61594]: DEBUG nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 855.397089] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.404514] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 283711f2a8bc41bcb0ac627adc3c3c4d [ 855.547451] env[61594]: DEBUG nova.compute.manager [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Received event network-changed-82010836-c561-4ba9-bf30-048d661c0450 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 855.547451] env[61594]: DEBUG nova.compute.manager [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Refreshing instance network info cache due to event network-changed-82010836-c561-4ba9-bf30-048d661c0450. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 855.547451] env[61594]: DEBUG oslo_concurrency.lockutils [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] Acquiring lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.547451] env[61594]: DEBUG oslo_concurrency.lockutils [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] Acquired lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.547451] env[61594]: DEBUG nova.network.neutron [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Refreshing network info cache for port 82010836-c561-4ba9-bf30-048d661c0450 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 855.547686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] Expecting reply to msg 9f007cab98ff4085899a4c922b451a82 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.555419] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f007cab98ff4085899a4c922b451a82 [ 855.575900] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7aa6915-62c1-423c-a0ec-75ac88f7379d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.585139] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fedcaf2-349b-474a-a6f5-ed68ca332355 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.623268] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e7f29f-4ebf-4e79-bcc6-b3af71d780ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.633743] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6376c686-3f8d-4795-befe-0cca20170272 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.650083] env[61594]: DEBUG nova.compute.provider_tree [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.651617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg a1955230021b4ba4ae0c9c54a291b39c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.654050] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.654576] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg a33ae63f3ff3478bb5cd211997cb5a9a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.660536] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a1955230021b4ba4ae0c9c54a291b39c [ 855.661478] env[61594]: DEBUG nova.scheduler.client.report [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 855.666049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg e54c5ef0b6244e1c80d386c6fa40055c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.666505] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a33ae63f3ff3478bb5cd211997cb5a9a [ 855.666954] env[61594]: DEBUG nova.network.neutron [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Updating 
instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.667980] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg af3f862e526c4dacbf5de622bfb66f52 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.679508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af3f862e526c4dacbf5de622bfb66f52 [ 855.680124] env[61594]: INFO nova.compute.manager [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] [instance: 88a6495c-b9b7-4822-9cae-f27594545b3e] Took 0.28 seconds to deallocate network for instance. [ 855.682204] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg c7e19af6bf6b4b868714e8ddae282227 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.683491] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e54c5ef0b6244e1c80d386c6fa40055c [ 855.684604] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.338s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.685138] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 855.686653] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 3c7f8fe78f3a40d3a11e7901f1594bd0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.702128] env[61594]: DEBUG nova.network.neutron [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 855.727767] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c7f8fe78f3a40d3a11e7901f1594bd0 [ 855.728393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7e19af6bf6b4b868714e8ddae282227 [ 855.729789] env[61594]: DEBUG nova.compute.utils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 855.730425] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 15ffb68d4e464ffd91d5b3acb8cb2196 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.733190] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 606a53eb16a04f74b593c036f3b29b0e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.734246] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 855.734417] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 855.741173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15ffb68d4e464ffd91d5b3acb8cb2196 [ 855.741757] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 855.743327] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg bbf6c359597d4b4e919113349f5f5499 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.767436] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 606a53eb16a04f74b593c036f3b29b0e [ 855.776060] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbf6c359597d4b4e919113349f5f5499 [ 855.778850] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg fb0e4602915342cdbba33f4571579dfe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.793356] env[61594]: WARNING oslo_vmware.rw_handles [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 855.793356] env[61594]: ERROR oslo_vmware.rw_handles [ 855.793698] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 855.795701] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 855.795954] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Copying Virtual Disk 
[datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/11010209-ab53-4a8d-b910-9e65e6afa990/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 855.798158] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05d58202-bba5-4b54-8886-540ccf82811d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.802025] env[61594]: INFO nova.scheduler.client.report [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Deleted allocations for instance 88a6495c-b9b7-4822-9cae-f27594545b3e [ 855.813557] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Expecting reply to msg 653fce5943a74df09630bdbdcc09171e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 855.814940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fb0e4602915342cdbba33f4571579dfe [ 855.816168] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 855.822633] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for the task: (returnval){ [ 855.822633] env[61594]: value = "task-1291424" [ 855.822633] env[61594]: _type = "Task" [ 855.822633] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.831915] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Task: {'id': task-1291424, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.838715] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 653fce5943a74df09630bdbdcc09171e [ 855.838715] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3352acb2-2519-4d95-a938-7905a23f9309 tempest-ServersTestJSON-1597439793 tempest-ServersTestJSON-1597439793-project-member] Lock "88a6495c-b9b7-4822-9cae-f27594545b3e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.829s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.858343] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 855.858501] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 855.858620] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 855.858799] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 855.858939] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 855.859105] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 855.859323] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 855.859479] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 855.859644] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 855.859811] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 855.859982] env[61594]: DEBUG nova.virt.hardware [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 855.861559] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203f3356-28e1-4894-b8d6-10b55d44e39a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.873170] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5fd465-811f-426e-afc7-543344583e5d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.936461] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Successfully created port: a230a46e-0d35-42de-8308-d37547cc8465 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 855.954266] env[61594]: DEBUG nova.policy [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 856.177185] env[61594]: DEBUG nova.network.neutron [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 856.177185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] Expecting reply to msg c8d9faa8167742f6b5ff1433e2754b4c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 856.189901] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c8d9faa8167742f6b5ff1433e2754b4c [ 856.189901] env[61594]: DEBUG oslo_concurrency.lockutils [req-2fd7517f-a448-4115-bb82-f81984753e58 req-92e64a57-7342-4c9c-ae10-f28908b871b6 service nova] Releasing lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.241121] env[61594]: ERROR nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. [ 856.241121] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 856.241121] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 856.241121] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 856.241121] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.241121] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.241121] env[61594]: ERROR nova.compute.manager raise self.value [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 856.241121] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 856.241121] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.241121] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 856.241574] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.241574] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 856.241574] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. 
[ 856.241574] env[61594]: ERROR nova.compute.manager [ 856.241574] env[61594]: Traceback (most recent call last): [ 856.241574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 856.241574] env[61594]: listener.cb(fileno) [ 856.241574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 856.241574] env[61594]: result = function(*args, **kwargs) [ 856.241574] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 856.241574] env[61594]: return func(*args, **kwargs) [ 856.241574] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 856.241574] env[61594]: raise e [ 856.241574] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 856.241574] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 856.241574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 856.241574] env[61594]: created_port_ids = self._update_ports_for_instance( [ 856.241574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 856.241574] env[61594]: with excutils.save_and_reraise_exception(): [ 856.241574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.241574] env[61594]: self.force_reraise() [ 856.241574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.241574] env[61594]: raise self.value [ 856.241574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 856.241574] env[61594]: updated_port = self._update_port( [ 856.241574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.241574] env[61594]: _ensure_no_port_binding_failure(port) [ 856.241574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.241574] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 856.242224] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. [ 856.242224] env[61594]: Removing descriptor: 25 [ 856.242224] env[61594]: ERROR nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. 
[ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Traceback (most recent call last): [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] yield resources [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.driver.spawn(context, instance, image_meta, [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 856.242224] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] vm_ref = self.build_virtual_machine(instance, [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] vif_infos = vmwarevif.get_vif_info(self._session, [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] for vif in network_info: [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self._sync_wrapper(fn, *args, **kwargs) [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.wait() [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self[:] = self._gt.wait() [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self._exit_event.wait() [ 856.242496] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 856.242774] env[61594]: ERROR 
nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] result = hub.switch() [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self.greenlet.switch() [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] result = function(*args, **kwargs) [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return func(*args, **kwargs) [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise e [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] nwinfo = self.network_api.allocate_for_instance( [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 856.242774] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] created_port_ids = self._update_ports_for_instance( [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] with excutils.save_and_reraise_exception(): [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.force_reraise() [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise self.value [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] updated_port = self._update_port( [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 856.243076] 
env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] _ensure_no_port_binding_failure(port) [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 856.243076] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise exception.PortBindingFailed(port_id=port['id']) [ 856.243337] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. [ 856.243337] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] [ 856.243337] env[61594]: INFO nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Terminating instance [ 856.244890] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquiring lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.244890] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquired lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.244890] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 856.245287] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 3c9c149afede4a14811d983c171f4a48 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 856.252606] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c9c149afede4a14811d983c171f4a48 [ 856.299553] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.337660] env[61594]: DEBUG oslo_vmware.exceptions [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 856.337660] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.337660] env[61594]: ERROR nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 856.337660] env[61594]: Faults: ['InvalidArgument'] [ 856.337660] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Traceback (most recent call last): [ 856.337660] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 856.337660] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] yield resources [ 856.337660] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 856.337660] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self.driver.spawn(context, instance, image_meta, [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self._vmops.spawn(context, instance, image_meta, injected_files, [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self._fetch_image_if_missing(context, vi) [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] image_cache(vi, tmp_image_ds_loc) [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] vm_util.copy_virtual_disk( [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] session._wait_for_task(vmdk_copy_task) [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 
856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return self.wait_for_task(task_ref) [ 856.337949] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return evt.wait() [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] result = hub.switch() [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return self.greenlet.switch() [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self.f(*self.args, **self.kw) [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] raise exceptions.translate_fault(task_info.error) [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Faults: ['InvalidArgument'] [ 856.338245] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] [ 856.338518] env[61594]: INFO nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Terminating instance [ 856.339304] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.339505] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.340231] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 856.340572] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquired lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.340757] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 856.341205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 2f5b852b9cdf44b2ad42356209b6a421 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 856.341906] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed7f2d64-6c7a-4cc1-809f-66c95b426678 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.348452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f5b852b9cdf44b2ad42356209b6a421 [ 856.356761] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.356975] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 856.358341] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5dbda3f0-6872-4f8e-8c57-00f26496782d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.366387] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 856.366387] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52115c1c-8718-cbf3-5a95-b887b4a90605" [ 856.366387] env[61594]: _type = "Task" [ 856.366387] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.376670] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52115c1c-8718-cbf3-5a95-b887b4a90605, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.419027] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 856.879167] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 856.879698] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating directory with path [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 856.882088] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c08abc86-f4de-4e07-ab21-b6b903c233ea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.901194] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created directory with path [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 856.901969] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Fetch image to [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 856.902361] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 856.903333] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bf421b-5bc8-449e-b11e-6cc58b240a70 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.913023] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000ed123-acd5-4700-95c4-82874e6d30ce {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.924238] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dd3efe7b-e3c5-471d-907d-57af5cfcf43e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.929511] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.930291] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 73aef997e9f04f638c5de0c538c932f5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 856.931897] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.932495] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 1057af8f78bb4ec89f6349672fd44d6b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 856.970363] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 73aef997e9f04f638c5de0c538c932f5 [ 856.971177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1057af8f78bb4ec89f6349672fd44d6b [ 856.972312] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8ded1c-407b-413d-befb-99cc477021cd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.975289] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Releasing lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.975843] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 856.976195] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 856.976762] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Releasing lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.977293] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 856.977639] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 856.981018] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693c57ae-7abe-46c6-a930-0389d7b6b7ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.981880] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78357274-b298-4eb1-bf27-f94bf0a5fe8c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.990298] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-962c80a2-c571-457a-ba55-b24bc1748cb0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.994235] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 856.996205] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-827de004-698d-4acc-a968-21bc7dcfc5bf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.005290] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bad158e-78dd-4f55-a583-1c6b49ad4350 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.026334] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 857.035948] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 11943a7b-9afa-4e9c-84af-079ccc2c7cf1 could not be found. [ 857.035948] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 857.035948] env[61594]: INFO nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Took 0.06 seconds to destroy the instance on the hypervisor. [ 857.035948] env[61594]: DEBUG oslo.service.loopingcall [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.037353] env[61594]: DEBUG nova.compute.manager [-] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 857.037675] env[61594]: DEBUG nova.network.neutron [-] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 857.039409] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 857.039692] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 857.042032] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Deleting the datastore file [datastore1] 28bcec42-4fb0-4ef1-b882-6224fdbcec16 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.042032] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c24fffcf-0b09-4c0b-8b45-0f47fd64982e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.049698] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for the task: (returnval){ [ 857.049698] env[61594]: value = "task-1291426" [ 857.049698] 
env[61594]: _type = "Task" [ 857.049698] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.058843] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Task: {'id': task-1291426, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.103629] env[61594]: DEBUG nova.network.neutron [-] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.104384] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4a6abd7247cc4bed91df2065376fdaac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.113851] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4a6abd7247cc4bed91df2065376fdaac [ 857.114337] env[61594]: DEBUG nova.network.neutron [-] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.117222] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 59b9a9d83e5b4384befba66c39469e2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.118944] env[61594]: DEBUG oslo_vmware.rw_handles [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 857.179554] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59b9a9d83e5b4384befba66c39469e2f [ 857.180220] env[61594]: INFO nova.compute.manager [-] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Took 0.14 seconds to deallocate network for instance. 
[ 857.183593] env[61594]: DEBUG nova.compute.claims [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 857.183973] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.184277] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.186352] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 27b614a2b8084dd48bc1b403af1b187a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.187732] env[61594]: DEBUG oslo_vmware.rw_handles [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 857.188064] env[61594]: DEBUG oslo_vmware.rw_handles [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 857.226036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 27b614a2b8084dd48bc1b403af1b187a [ 857.348578] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f304f890-8490-4191-99ca-a8ce247b511e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.357845] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676602f9-936f-47dc-a108-9d59f70a6163 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.389052] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c41fea0-2d2e-403b-8b9c-ff0b173248c5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.398759] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8106715-399c-497a-a59e-fd5db028f440 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.415469] env[61594]: DEBUG nova.compute.provider_tree [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.416054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg c45ac9c86eea4e9382c0a277bb9c453a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.426156] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c45ac9c86eea4e9382c0a277bb9c453a [ 857.427202] env[61594]: DEBUG nova.scheduler.client.report [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 857.430090] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 7bb815e4c952439f95566b82dd165d25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.437147] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Successfully created port: ea2e1bce-3212-4e30-9563-3347fadd7f9f {{(pid=61594) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 857.451238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7bb815e4c952439f95566b82dd165d25 [ 857.451894] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.268s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.452624] env[61594]: ERROR nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Traceback (most recent call last): [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.driver.spawn(context, instance, image_meta, [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] vm_ref = self.build_virtual_machine(instance, [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] vif_infos = vmwarevif.get_vif_info(self._session, [ 857.452624] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] for vif in network_info: [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self._sync_wrapper(fn, *args, **kwargs) [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.wait() [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File 
"/opt/stack/nova/nova/network/model.py", line 635, in wait [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self[:] = self._gt.wait() [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self._exit_event.wait() [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] result = hub.switch() [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 857.452988] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return self.greenlet.switch() [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] result = function(*args, **kwargs) [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] return func(*args, **kwargs) [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise e [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] nwinfo = self.network_api.allocate_for_instance( [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] created_port_ids = self._update_ports_for_instance( [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] with excutils.save_and_reraise_exception(): [ 857.453343] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] self.force_reraise() [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 
11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise self.value [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] updated_port = self._update_port( [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] _ensure_no_port_binding_failure(port) [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] raise exception.PortBindingFailed(port_id=port['id']) [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] nova.exception.PortBindingFailed: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. [ 857.453715] env[61594]: ERROR nova.compute.manager [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] [ 857.454038] env[61594]: DEBUG nova.compute.utils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.455372] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Build of instance 11943a7b-9afa-4e9c-84af-079ccc2c7cf1 was re-scheduled: Binding failed for port 82010836-c561-4ba9-bf30-048d661c0450, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 857.455914] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 857.456262] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquiring lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.456478] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Acquired lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.456720] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.457211] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 6004073b5b754723877c1146f2f8a60c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.465646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6004073b5b754723877c1146f2f8a60c [ 857.512193] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.560129] env[61594]: DEBUG oslo_vmware.api [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Task: {'id': task-1291426, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041393} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.560460] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 857.560748] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 857.560991] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 857.561319] env[61594]: INFO nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Took 0.59 seconds to destroy the instance on the hypervisor. [ 857.561590] env[61594]: DEBUG oslo.service.loopingcall [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 857.561858] env[61594]: DEBUG nova.compute.manager [-] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 857.564013] env[61594]: DEBUG nova.compute.claims [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 857.564234] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.564500] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.566631] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 33507ffcadbd4a32830f8e231552dc39 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.630099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33507ffcadbd4a32830f8e231552dc39 [ 857.782032] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f543ee5-34c1-46c5-972c-c502a8a29048 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.794425] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187a5569-26fb-4f16-9c68-144e5ac6b2e5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.849974] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6c6a17-855d-4e1a-9d36-4dd8d693cb3f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.864397] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183cf852-e457-494f-8a28-7f335db0cfa5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.886101] env[61594]: DEBUG nova.compute.provider_tree [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.886917] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 2627a7f488bf4643a34e186514c47e0a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.897593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
2627a7f488bf4643a34e186514c47e0a [ 857.898546] env[61594]: DEBUG nova.scheduler.client.report [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 857.900889] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg b89b08206bd04045acd1336397a43854 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.913441] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b89b08206bd04045acd1336397a43854 [ 857.914925] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.350s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.914925] env[61594]: ERROR nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 857.914925] env[61594]: Faults: ['InvalidArgument'] [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Traceback (most recent call last): [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self.driver.spawn(context, instance, image_meta, [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self._vmops.spawn(context, instance, image_meta, injected_files, [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 857.914925] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self._fetch_image_if_missing(context, vi) [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] image_cache(vi, tmp_image_ds_loc) [ 857.915255] env[61594]: ERROR 
nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] vm_util.copy_virtual_disk( [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] session._wait_for_task(vmdk_copy_task) [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return self.wait_for_task(task_ref) [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return evt.wait() [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] result = hub.switch() [ 857.915255] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] return self.greenlet.switch() [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] self.f(*self.args, **self.kw) [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] raise exceptions.translate_fault(task_info.error) [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Faults: ['InvalidArgument'] [ 857.915544] env[61594]: ERROR nova.compute.manager [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] [ 857.915778] env[61594]: DEBUG nova.compute.utils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.920312] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 
tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Build of instance 28bcec42-4fb0-4ef1-b882-6224fdbcec16 was re-scheduled: A specified parameter was not correct: fileType [ 857.920312] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 857.920819] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 857.921108] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquiring lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 857.921303] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Acquired lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.921663] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.921989] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg c5b22ea7084548f4a377d2b1cf2fcc81 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 857.931735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5b22ea7084548f4a377d2b1cf2fcc81 [ 858.009285] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.100394] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.101050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg d064f7831a7e4bba924c950601f8c2d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.112097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d064f7831a7e4bba924c950601f8c2d8 [ 858.112927] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Releasing lock "refresh_cache-11943a7b-9afa-4e9c-84af-079ccc2c7cf1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.113778] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 858.114041] env[61594]: DEBUG nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 858.114414] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.195424] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.196030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 87c8a4de6ce648eb9c3876605ec336e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.205082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87c8a4de6ce648eb9c3876605ec336e0 [ 858.205738] env[61594]: DEBUG nova.network.neutron [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.206322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 673cda79ce6a4787a5ae4633d1813ece in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.215866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 673cda79ce6a4787a5ae4633d1813ece [ 858.216500] env[61594]: INFO nova.compute.manager [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] [instance: 11943a7b-9afa-4e9c-84af-079ccc2c7cf1] Took 0.10 seconds to deallocate network for instance. [ 858.218340] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg 98f6670b81c14e1ab79a33f5bc0b17d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.254238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98f6670b81c14e1ab79a33f5bc0b17d2 [ 858.257491] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg decb1a2bbcb642efabd5ccc7342658ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.306086] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg decb1a2bbcb642efabd5ccc7342658ed [ 858.326876] env[61594]: DEBUG nova.network.neutron [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.327445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 6ba1767e36604696901b9c722261e582 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.336276] env[61594]: INFO nova.scheduler.client.report [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Deleted allocations for instance 11943a7b-9afa-4e9c-84af-079ccc2c7cf1 [ 858.343975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
6ba1767e36604696901b9c722261e582 [ 858.346484] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Releasing lock "refresh_cache-28bcec42-4fb0-4ef1-b882-6224fdbcec16" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 858.346736] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 858.346924] env[61594]: DEBUG nova.compute.manager [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] [instance: 28bcec42-4fb0-4ef1-b882-6224fdbcec16] Skipping network deallocation for instance since networking was not requested. {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 858.348559] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 72b74e76e98048b4abb0ccb407b0a87e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.349808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Expecting reply to msg ef074979a11c4f8faea72b8573eb51ab in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.373107] env[61594]: DEBUG nova.compute.manager [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] [instance: b56a9692-3745-4513-879c-4298716c5e81] Received event network-changed-9cc9c772-a915-4d6d-8e40-aa6981387e9b {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 858.373107] env[61594]: DEBUG nova.compute.manager [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] [instance: b56a9692-3745-4513-879c-4298716c5e81] Refreshing instance network info cache due to event network-changed-9cc9c772-a915-4d6d-8e40-aa6981387e9b. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 858.373107] env[61594]: DEBUG oslo_concurrency.lockutils [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] Acquiring lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 858.373107] env[61594]: DEBUG oslo_concurrency.lockutils [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] Acquired lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 858.373107] env[61594]: DEBUG nova.network.neutron [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] [instance: b56a9692-3745-4513-879c-4298716c5e81] Refreshing network info cache for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 858.373478] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] Expecting reply to msg 89e8e89615664ff9b8f0e0a8dfde4b4e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.375070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef074979a11c4f8faea72b8573eb51ab [ 858.375545] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0b8d0c24-6031-4c9f-9d22-7e01f7ce2fea tempest-ServersTestJSON-1540490964 tempest-ServersTestJSON-1540490964-project-member] Lock "11943a7b-9afa-4e9c-84af-079ccc2c7cf1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.109s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.392128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89e8e89615664ff9b8f0e0a8dfde4b4e [ 858.403022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 72b74e76e98048b4abb0ccb407b0a87e [ 858.405656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 08ef0ea81d904c6f8ba02626464c908c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.467973] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ef0ea81d904c6f8ba02626464c908c [ 858.505020] env[61594]: INFO nova.scheduler.client.report [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Deleted allocations for instance 28bcec42-4fb0-4ef1-b882-6224fdbcec16 [ 858.511182] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Expecting reply to msg 96300379752e4c73b5d81a43da4c5102 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 858.513250] env[61594]: DEBUG nova.network.neutron [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.528381] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96300379752e4c73b5d81a43da4c5102 [ 858.528381] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498c7327-85e1-40cc-8f36-d708b0c38654 tempest-ServerShowV254Test-182182488 tempest-ServerShowV254Test-182182488-project-member] Lock "28bcec42-4fb0-4ef1-b882-6224fdbcec16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.343s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 859.088803] env[61594]: DEBUG nova.network.neutron [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] [instance: b56a9692-3745-4513-879c-4298716c5e81] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.090787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] Expecting reply to msg 6ff27eb013514d9fb822ac9e9b07e7ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 859.099666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ff27eb013514d9fb822ac9e9b07e7ec [ 859.100364] env[61594]: DEBUG oslo_concurrency.lockutils [req-2adda908-35c9-4048-b3dc-5e308ff705d1 req-ce9e74e3-c6e6-4765-8c5c-0fe69bba8cb9 service nova] Releasing lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 859.652928] env[61594]: ERROR nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. 
[ 859.652928] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 859.652928] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 859.652928] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 859.652928] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.652928] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.652928] env[61594]: ERROR nova.compute.manager raise self.value [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 859.652928] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 859.652928] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.652928] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 859.653574] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.653574] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 859.653574] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. 
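The tracebacks above and below bottom out in the same check: after Neutron returns the updated port, Nova inspects the binding result and raises PortBindingFailed when the bind did not succeed, which is what forces the re-schedules seen for instance 11943a7b and later for b56a9692. A minimal, self-contained sketch of that check follows; the 'binding:vif_type' / 'binding_failed' comparison is inferred from Neutron's port-binding semantics and is not quoted from the Nova source.

    # Sketch only: reconstructs the check named in the traceback
    # (_ensure_no_port_binding_failure) from its observable behaviour.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumption: Neutron reports a failed bind via binding:vif_type.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure(
            {'id': '9cc9c772-a915-4d6d-8e40-aa6981387e9b',
             'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # prints the same message that appears in the log above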
[ 859.653574] env[61594]: ERROR nova.compute.manager [ 859.653574] env[61594]: Traceback (most recent call last): [ 859.653574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 859.653574] env[61594]: listener.cb(fileno) [ 859.653574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 859.653574] env[61594]: result = function(*args, **kwargs) [ 859.653574] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 859.653574] env[61594]: return func(*args, **kwargs) [ 859.653574] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 859.653574] env[61594]: raise e [ 859.653574] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 859.653574] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 859.653574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 859.653574] env[61594]: created_port_ids = self._update_ports_for_instance( [ 859.653574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 859.653574] env[61594]: with excutils.save_and_reraise_exception(): [ 859.653574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.653574] env[61594]: self.force_reraise() [ 859.653574] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.653574] env[61594]: raise self.value [ 859.653574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 859.653574] env[61594]: updated_port = self._update_port( [ 859.653574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.653574] env[61594]: _ensure_no_port_binding_failure(port) [ 859.653574] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.653574] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 859.654275] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. [ 859.654275] env[61594]: Removing descriptor: 22 [ 859.654275] env[61594]: ERROR nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. 
[ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] Traceback (most recent call last): [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] yield resources [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.driver.spawn(context, instance, image_meta, [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 859.654275] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] vm_ref = self.build_virtual_machine(instance, [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] vif_infos = vmwarevif.get_vif_info(self._session, [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] for vif in network_info: [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self._sync_wrapper(fn, *args, **kwargs) [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.wait() [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self[:] = self._gt.wait() [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self._exit_event.wait() [ 859.657129] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 859.657781] env[61594]: ERROR 
nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] result = hub.switch() [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self.greenlet.switch() [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] result = function(*args, **kwargs) [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return func(*args, **kwargs) [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise e [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] nwinfo = self.network_api.allocate_for_instance( [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 859.657781] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] created_port_ids = self._update_ports_for_instance( [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] with excutils.save_and_reraise_exception(): [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.force_reraise() [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise self.value [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] updated_port = self._update_port( [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 859.658457] 
env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] _ensure_no_port_binding_failure(port) [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 859.658457] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise exception.PortBindingFailed(port_id=port['id']) [ 859.658796] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. [ 859.658796] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] [ 859.658796] env[61594]: INFO nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Terminating instance [ 859.660548] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquiring lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.660724] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquired lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.660929] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 859.662398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg af67b676e7714764b9881ae9279a32c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 859.674082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af67b676e7714764b9881ae9279a32c6 [ 859.725313] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.100498] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.101321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 120a31c9a8a54463a793eaf2a486a828 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.112725] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 120a31c9a8a54463a793eaf2a486a828 [ 860.113596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Releasing lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.113843] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 860.114047] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 860.114606] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93e750cb-9ec0-43b6-b2d9-e543d51e5b9a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.129752] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee531d67-770e-4991-8739-b10e2247a0a8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.161437] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b56a9692-3745-4513-879c-4298716c5e81 could not be found. 
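The warning above, followed immediately below by "Instance destroyed" and a successful network deallocation, shows the destroy path treating a VM that was never created on the backend as already gone, so the rest of the cleanup can still run. A rough illustration of that idempotent-destroy pattern, using placeholder helpers rather than Nova's actual internals:

    class InstanceNotFound(Exception):
        pass

    def destroy_on_hypervisor(find_vm, unregister_vm, instance_uuid):
        # Placeholder helpers: find_vm raises InstanceNotFound when the VM
        # does not exist; unregister_vm removes it from the backend.
        try:
            vm_ref = find_vm(instance_uuid)
        except InstanceNotFound:
            # Nothing exists on the backend; warn and report success so
            # network deallocation and claim cleanup proceed anyway.
            print(f"WARNING: instance {instance_uuid} not found on backend")
            return
        unregister_vm(vm_ref)

    def missing(_uuid):
        raise InstanceNotFound()

    destroy_on_hypervisor(missing, print, 'b56a9692-3745-4513-879c-4298716c5e81')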
[ 860.161685] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 860.161875] env[61594]: INFO nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Took 0.05 seconds to destroy the instance on the hypervisor. [ 860.162154] env[61594]: DEBUG oslo.service.loopingcall [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.162413] env[61594]: DEBUG nova.compute.manager [-] [instance: b56a9692-3745-4513-879c-4298716c5e81] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 860.163138] env[61594]: DEBUG nova.network.neutron [-] [instance: b56a9692-3745-4513-879c-4298716c5e81] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 860.215870] env[61594]: DEBUG nova.network.neutron [-] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 860.216419] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 33cfbee950e44bd7bd81c9f4e1cc91d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.225307] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33cfbee950e44bd7bd81c9f4e1cc91d5 [ 860.225786] env[61594]: DEBUG nova.network.neutron [-] [instance: b56a9692-3745-4513-879c-4298716c5e81] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.226230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 0c827721099e47dbae57133646f9e2ca in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.239266] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c827721099e47dbae57133646f9e2ca [ 860.239790] env[61594]: INFO nova.compute.manager [-] [instance: b56a9692-3745-4513-879c-4298716c5e81] Took 0.08 seconds to deallocate network for instance. 
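The inventory payload the report client logs for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be (at 857.898 above and again at 860.499 below) is enough to work out the capacity placement schedules against: (total - reserved) * allocation_ratio per resource class. A quick check, keeping only the fields that formula uses:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0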
[ 860.241964] env[61594]: DEBUG nova.compute.claims [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 860.243230] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 860.243230] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.245280] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg b56ee2207bf240a5bb497e4e1d24b366 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.294000] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b56ee2207bf240a5bb497e4e1d24b366 [ 860.400019] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dca7ed-e69b-41e2-91f5-ab7b4567f2b9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.406606] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77af6c5-6ffd-4120-a5b3-c02357640b00 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.453793] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a47c6d-16b3-41c7-966b-6fc3e12f6785 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.462600] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40fe82ec-9076-46c5-841f-194fafc2bb9b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.481126] env[61594]: DEBUG nova.compute.provider_tree [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 860.481763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg a14a5eb959ca4cceac2eda64288d00d0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.498363] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a14a5eb959ca4cceac2eda64288d00d0 [ 860.499616] env[61594]: DEBUG nova.scheduler.client.report [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 860.504055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 79048dde9d17477394984c8c987799c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.519695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79048dde9d17477394984c8c987799c3 [ 860.520709] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.277s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.521538] env[61594]: ERROR nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. 
[ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] Traceback (most recent call last): [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.driver.spawn(context, instance, image_meta, [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self._vmops.spawn(context, instance, image_meta, injected_files, [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] vm_ref = self.build_virtual_machine(instance, [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] vif_infos = vmwarevif.get_vif_info(self._session, [ 860.521538] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] for vif in network_info: [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self._sync_wrapper(fn, *args, **kwargs) [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.wait() [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self[:] = self._gt.wait() [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self._exit_event.wait() [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] result = hub.switch() [ 860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
860.522444] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return self.greenlet.switch() [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] result = function(*args, **kwargs) [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] return func(*args, **kwargs) [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise e [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] nwinfo = self.network_api.allocate_for_instance( [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] created_port_ids = self._update_ports_for_instance( [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] with excutils.save_and_reraise_exception(): [ 860.523969] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] self.force_reraise() [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise self.value [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] updated_port = self._update_port( [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] _ensure_no_port_binding_failure(port) [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] raise exception.PortBindingFailed(port_id=port['id']) [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] nova.exception.PortBindingFailed: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. [ 860.524565] env[61594]: ERROR nova.compute.manager [instance: b56a9692-3745-4513-879c-4298716c5e81] [ 860.524990] env[61594]: DEBUG nova.compute.utils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 860.524990] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Build of instance b56a9692-3745-4513-879c-4298716c5e81 was re-scheduled: Binding failed for port 9cc9c772-a915-4d6d-8e40-aa6981387e9b, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 860.524990] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 860.524990] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquiring lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.525181] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Acquired lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.525181] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 860.525551] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 969dc4c6c8384d159e3bdbdef08b5435 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 860.534226] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 969dc4c6c8384d159e3bdbdef08b5435 [ 860.726958] 
env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 861.299739] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.299739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 33a517713ac24500be43799212ab35dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.308615] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33a517713ac24500be43799212ab35dd [ 861.312071] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Releasing lock "refresh_cache-b56a9692-3745-4513-879c-4298716c5e81" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.312071] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 861.312071] env[61594]: DEBUG nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 861.312071] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 861.350128] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 861.350128] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 7e74bbdd34c54430985771ec0dcbbdae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.357261] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e74bbdd34c54430985771ec0dcbbdae [ 861.359556] env[61594]: DEBUG nova.network.neutron [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.359556] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg fad79d91012c43ceb269ffe304ef683e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.370338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fad79d91012c43ceb269ffe304ef683e [ 861.370338] env[61594]: INFO nova.compute.manager [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] [instance: b56a9692-3745-4513-879c-4298716c5e81] Took 0.06 seconds to deallocate network for instance. [ 861.370338] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg 266b24dba928459b9a600c552b4b314d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.410874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 266b24dba928459b9a600c552b4b314d [ 861.410874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg d2b71c027c624f268824a7220ceae195 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.444738] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d2b71c027c624f268824a7220ceae195 [ 861.491175] env[61594]: DEBUG nova.compute.manager [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Received event network-changed-ea2e1bce-3212-4e30-9563-3347fadd7f9f {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 861.491256] env[61594]: DEBUG nova.compute.manager [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Refreshing instance network info cache due to event network-changed-ea2e1bce-3212-4e30-9563-3347fadd7f9f. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 861.491439] env[61594]: DEBUG oslo_concurrency.lockutils [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] Acquiring lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.491580] env[61594]: DEBUG oslo_concurrency.lockutils [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] Acquired lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.493087] env[61594]: DEBUG nova.network.neutron [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Refreshing network info cache for port ea2e1bce-3212-4e30-9563-3347fadd7f9f {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 861.493087] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] Expecting reply to msg 15c0477ccf504d1c962adf39c61dbcee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.497527] env[61594]: INFO nova.scheduler.client.report [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Deleted allocations for instance b56a9692-3745-4513-879c-4298716c5e81 [ 861.506045] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Expecting reply to msg afcd4a0c2dc3467b8527e0ce07359398 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 861.507355] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15c0477ccf504d1c962adf39c61dbcee [ 861.528168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg afcd4a0c2dc3467b8527e0ce07359398 [ 861.528718] env[61594]: DEBUG oslo_concurrency.lockutils [None req-c738014b-17a0-49fd-8444-2689fa95efc3 tempest-ServerMetadataNegativeTestJSON-11169287 tempest-ServerMetadataNegativeTestJSON-11169287-project-member] Lock "b56a9692-3745-4513-879c-4298716c5e81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.543s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.612999] env[61594]: DEBUG nova.network.neutron [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.284604] env[61594]: ERROR nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. 
[ 862.284604] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.284604] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.284604] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.284604] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.284604] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.284604] env[61594]: ERROR nova.compute.manager raise self.value [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.284604] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 862.284604] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.284604] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 862.285194] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.285194] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 862.285194] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. 
[ 862.285194] env[61594]: ERROR nova.compute.manager [ 862.285194] env[61594]: Traceback (most recent call last): [ 862.285194] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 862.285194] env[61594]: listener.cb(fileno) [ 862.285194] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 862.285194] env[61594]: result = function(*args, **kwargs) [ 862.285194] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.285194] env[61594]: return func(*args, **kwargs) [ 862.285194] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 862.285194] env[61594]: raise e [ 862.285194] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.285194] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 862.285194] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.285194] env[61594]: created_port_ids = self._update_ports_for_instance( [ 862.285194] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.285194] env[61594]: with excutils.save_and_reraise_exception(): [ 862.285194] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.285194] env[61594]: self.force_reraise() [ 862.285194] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.285194] env[61594]: raise self.value [ 862.285194] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.285194] env[61594]: updated_port = self._update_port( [ 862.285194] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.285194] env[61594]: _ensure_no_port_binding_failure(port) [ 862.285194] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.285194] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 862.286219] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. [ 862.286219] env[61594]: Removing descriptor: 21 [ 862.286219] env[61594]: ERROR nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. 
[ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Traceback (most recent call last): [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] yield resources [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.driver.spawn(context, instance, image_meta, [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 862.286219] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] vm_ref = self.build_virtual_machine(instance, [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] for vif in network_info: [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self._sync_wrapper(fn, *args, **kwargs) [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.wait() [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self[:] = self._gt.wait() [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self._exit_event.wait() [ 862.286491] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 862.286837] env[61594]: ERROR 
nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] result = hub.switch() [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self.greenlet.switch() [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] result = function(*args, **kwargs) [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return func(*args, **kwargs) [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise e [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] nwinfo = self.network_api.allocate_for_instance( [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.286837] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] created_port_ids = self._update_ports_for_instance( [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] with excutils.save_and_reraise_exception(): [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.force_reraise() [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise self.value [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] updated_port = self._update_port( [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.287686] 
env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] _ensure_no_port_binding_failure(port) [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.287686] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise exception.PortBindingFailed(port_id=port['id']) [ 862.287956] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. [ 862.287956] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] [ 862.287956] env[61594]: INFO nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Terminating instance [ 862.288162] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.326750] env[61594]: DEBUG nova.network.neutron [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.327321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] Expecting reply to msg 77c35be785a149e4b231c307f3de51d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 862.336879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 77c35be785a149e4b231c307f3de51d5 [ 862.337590] env[61594]: DEBUG oslo_concurrency.lockutils [req-5e3740fc-8d25-4afe-99ba-082f8b0964c6 req-13059de4-71f1-41a0-8b72-c46b5f0f1fe7 service nova] Releasing lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.337958] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.338160] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 862.339670] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 02857bcf4a4c46c1a413475ad69e5581 in 
queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 862.352240] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 02857bcf4a4c46c1a413475ad69e5581 [ 862.416622] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.615300] env[61594]: ERROR nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. [ 862.615300] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.615300] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.615300] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.615300] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.615300] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.615300] env[61594]: ERROR nova.compute.manager raise self.value [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.615300] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 862.615300] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.615300] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 862.615709] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.615709] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 862.615709] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. 
[ 862.615709] env[61594]: ERROR nova.compute.manager [ 862.615709] env[61594]: Traceback (most recent call last): [ 862.615709] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 862.615709] env[61594]: listener.cb(fileno) [ 862.615709] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 862.615709] env[61594]: result = function(*args, **kwargs) [ 862.615709] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.615709] env[61594]: return func(*args, **kwargs) [ 862.615709] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 862.615709] env[61594]: raise e [ 862.615709] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.615709] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 862.615709] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.615709] env[61594]: created_port_ids = self._update_ports_for_instance( [ 862.615709] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.615709] env[61594]: with excutils.save_and_reraise_exception(): [ 862.615709] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.615709] env[61594]: self.force_reraise() [ 862.615709] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.615709] env[61594]: raise self.value [ 862.615709] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.615709] env[61594]: updated_port = self._update_port( [ 862.615709] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.615709] env[61594]: _ensure_no_port_binding_failure(port) [ 862.615709] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.615709] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 862.616337] env[61594]: nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. [ 862.616337] env[61594]: Removing descriptor: 24 [ 862.616655] env[61594]: ERROR nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. 
[ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Traceback (most recent call last): [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] yield resources [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.driver.spawn(context, instance, image_meta, [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] vm_ref = self.build_virtual_machine(instance, [ 862.616655] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] vif_infos = vmwarevif.get_vif_info(self._session, [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] for vif in network_info: [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self._sync_wrapper(fn, *args, **kwargs) [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.wait() [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self[:] = self._gt.wait() [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self._exit_event.wait() [ 862.617079] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 862.617079] env[61594]: ERROR 
nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] result = hub.switch() [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self.greenlet.switch() [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] result = function(*args, **kwargs) [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return func(*args, **kwargs) [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise e [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] nwinfo = self.network_api.allocate_for_instance( [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] created_port_ids = self._update_ports_for_instance( [ 862.617544] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] with excutils.save_and_reraise_exception(): [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.force_reraise() [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise self.value [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] updated_port = self._update_port( [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 862.619607] 
env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] _ensure_no_port_binding_failure(port) [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise exception.PortBindingFailed(port_id=port['id']) [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. [ 862.619607] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] [ 862.619953] env[61594]: INFO nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Terminating instance [ 862.619953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquiring lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.619953] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquired lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.620051] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 862.620598] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg b1fcf43b74f8446ba178de298cd6ec18 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 862.636332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1fcf43b74f8446ba178de298cd6ec18 [ 862.694027] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.904446] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.905022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 583d6dd75f074922b43b75c961dd59dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 862.913793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 583d6dd75f074922b43b75c961dd59dc [ 862.914508] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.914918] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 862.915133] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 862.915680] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ebaa421-0df1-4149-80f0-89c24273e92f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.926976] env[61594]: DEBUG nova.compute.manager [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Received event network-changed-a230a46e-0d35-42de-8308-d37547cc8465 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 862.927204] env[61594]: DEBUG nova.compute.manager [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Refreshing instance network info cache due to event network-changed-a230a46e-0d35-42de-8308-d37547cc8465. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 862.927394] env[61594]: DEBUG oslo_concurrency.lockutils [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] Acquiring lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.936481] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0bba3ca-89f4-4f92-b842-eb4a30d6b893 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.973404] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ead7b401-1fd8-49b8-958a-2537714f22ca could not be found. [ 862.973672] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 862.973877] env[61594]: INFO nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Took 0.06 seconds to destroy the instance on the hypervisor. [ 862.974319] env[61594]: DEBUG oslo.service.loopingcall [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.974612] env[61594]: DEBUG nova.compute.manager [-] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 862.974711] env[61594]: DEBUG nova.network.neutron [-] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 863.044586] env[61594]: DEBUG nova.network.neutron [-] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.045182] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg fc9f04ab45ed4530a236e65cbf0764e4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.055899] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc9f04ab45ed4530a236e65cbf0764e4 [ 863.056557] env[61594]: DEBUG nova.network.neutron [-] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.056970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg cf3e892f55fe46e2a4dc15c444f1877a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.063643] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.064048] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 458eabda3ab34105bf73bc7b8b3357c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.069260] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cf3e892f55fe46e2a4dc15c444f1877a [ 863.069981] env[61594]: INFO nova.compute.manager [-] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Took 0.09 seconds to deallocate network for instance. 
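Every failed build in this section bottoms out in the same frame, nova/network/neutron.py:294, where _ensure_no_port_binding_failure(port) raises exception.PortBindingFailed(port_id=port['id']). A minimal sketch of that check follows; the specific attribute inspected (binding:vif_type equal to 'binding_failed') is an assumption about how Neutron reports a failed binding and is not visible in this log, only the raise is.

    # Hypothetical reconstruction of the helper named in the tracebacks above.
    # Assumption: Neutron marks a failed binding with
    # port['binding:vif_type'] == 'binding_failed'; the log only shows the raise.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def _ensure_no_port_binding_failure(port):
        binding_vif_type = port.get('binding:vif_type')
        if binding_vif_type == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])
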
[ 863.074781] env[61594]: DEBUG nova.compute.claims [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 863.074893] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.075122] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.076969] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 97f45cb9206c4293b85d696d5c8fe5c7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.079015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 458eabda3ab34105bf73bc7b8b3357c9 [ 863.079405] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Releasing lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.079940] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 863.083232] env[61594]: DEBUG oslo_concurrency.lockutils [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] Acquired lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.083232] env[61594]: DEBUG nova.network.neutron [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Refreshing network info cache for port a230a46e-0d35-42de-8308-d37547cc8465 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 863.083232] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] Expecting reply to msg 9c9211b6b4324aa18ccb348b3a3b3179 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.083232] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52d7a2f9-e16a-402a-8264-1f279f9d3cf0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.091477] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0184966-35df-4efd-82fb-988cde2e37d9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.104087] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9c9211b6b4324aa18ccb348b3a3b3179 [ 863.120810] env[61594]: WARNING nova.virt.vmwareapi.driver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance does not exist. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance b89e3f53-d9cd-400a-8ba9-83e328a59de8 could not be found. [ 863.121048] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 863.121783] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97f45cb9206c4293b85d696d5c8fe5c7 [ 863.122161] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d2127ab3-704a-4e40-8ad3-de332bb1f5bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.134298] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee959d5-c548-4793-8b96-585970620e71 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.145996] env[61594]: DEBUG nova.network.neutron [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.161300] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b89e3f53-d9cd-400a-8ba9-83e328a59de8 could not be found. [ 863.161822] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 863.161822] env[61594]: INFO nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Took 0.08 seconds to destroy the instance on the hypervisor. [ 863.162159] env[61594]: DEBUG oslo.service.loopingcall [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.162483] env[61594]: DEBUG nova.compute.manager [-] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 863.162483] env[61594]: DEBUG nova.network.neutron [-] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 863.224141] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3308dca0-3aea-46b0-8d10-11f5fed72888 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.227281] env[61594]: DEBUG nova.network.neutron [-] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.227779] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg c816c67727774283ad5bd0a56c9dc443 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.233847] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8792ebc7-4edf-43d6-a058-de8c0ff7a160 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.238458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c816c67727774283ad5bd0a56c9dc443 [ 863.238458] env[61594]: DEBUG nova.network.neutron [-] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.238458] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg be3ac787f48f45ef8d902f16acb53ed1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.270737] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be3ac787f48f45ef8d902f16acb53ed1 [ 863.271705] env[61594]: INFO nova.compute.manager [-] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Took 0.11 seconds to deallocate network for instance. [ 863.272674] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c289688d-56df-46f2-931a-8bc85ff078d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.285370] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cbcc68-25e0-4f91-bfa3-9f9af8f9e314 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.301914] env[61594]: DEBUG nova.compute.provider_tree [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.302501] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 0a257082070c462e956596500eb7ede6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.318017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a257082070c462e956596500eb7ede6 [ 863.319049] env[61594]: DEBUG nova.scheduler.client.report [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 863.321992] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg af0dcfb3b3944af5b2c27feb4723bd0c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.338811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg af0dcfb3b3944af5b2c27feb4723bd0c [ 863.339964] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.265s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.340957] env[61594]: ERROR nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Traceback (most recent call last): [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.driver.spawn(context, instance, image_meta, [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] vm_ref = self.build_virtual_machine(instance, [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] vif_infos = vmwarevif.get_vif_info(self._session, [ 863.340957] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] for vif in network_info: [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self._sync_wrapper(fn, *args, **kwargs) [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 
863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.wait() [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self[:] = self._gt.wait() [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self._exit_event.wait() [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] result = hub.switch() [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 863.341484] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return self.greenlet.switch() [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] result = function(*args, **kwargs) [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] return func(*args, **kwargs) [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise e [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] nwinfo = self.network_api.allocate_for_instance( [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] created_port_ids = self._update_ports_for_instance( [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] with excutils.save_and_reraise_exception(): [ 863.342672] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] self.force_reraise() [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise self.value [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] updated_port = self._update_port( [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] _ensure_no_port_binding_failure(port) [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] raise exception.PortBindingFailed(port_id=port['id']) [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] nova.exception.PortBindingFailed: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. [ 863.343612] env[61594]: ERROR nova.compute.manager [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] [ 863.345252] env[61594]: DEBUG nova.compute.utils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 863.345252] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Build of instance ead7b401-1fd8-49b8-958a-2537714f22ca was re-scheduled: Binding failed for port ea2e1bce-3212-4e30-9563-3347fadd7f9f, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 863.345252] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 863.345252] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.345407] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.345407] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.345407] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c7347f5b6e8e4827b2e4055d8b566879 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.355488] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7347f5b6e8e4827b2e4055d8b566879 [ 863.357995] env[61594]: INFO nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Took 0.08 seconds to detach 1 volumes for instance. 
[ 863.360282] env[61594]: DEBUG nova.compute.claims [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 863.360750] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 863.360750] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.362750] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 63777baef7034e2c8efa7ab4f6960b42 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.409967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63777baef7034e2c8efa7ab4f6960b42 [ 863.507949] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c733cc41-c48e-423a-a5e3-8479b1cdb01a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.517387] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4334d96-0848-4ed5-8aab-bf5bae22c576 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.559120] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673e84e0-2d71-46fd-93d4-198d3aa776c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.567293] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426435b8-f1c1-4417-bbfc-f2ab3574fc01 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.582656] env[61594]: DEBUG nova.compute.provider_tree [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.583201] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg d6df51ba394043ea91b585a117b2ff29 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.592978] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
d6df51ba394043ea91b585a117b2ff29 [ 863.593975] env[61594]: DEBUG nova.scheduler.client.report [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 863.596281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg df3d612174514b2cbaa69acf3aa942e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.615015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg df3d612174514b2cbaa69acf3aa942e2 [ 863.615015] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.253s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 863.615015] env[61594]: ERROR nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. 
[ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Traceback (most recent call last): [ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.driver.spawn(context, instance, image_meta, [ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 863.615015] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] vm_ref = self.build_virtual_machine(instance, [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] vif_infos = vmwarevif.get_vif_info(self._session, [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] for vif in network_info: [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self._sync_wrapper(fn, *args, **kwargs) [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.wait() [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self[:] = self._gt.wait() [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 863.615346] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self._exit_event.wait() [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] result = hub.switch() [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return self.greenlet.switch() [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] result = function(*args, **kwargs) [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] return func(*args, **kwargs) [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise e [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] nwinfo = self.network_api.allocate_for_instance( [ 863.615829] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] created_port_ids = self._update_ports_for_instance( [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] with excutils.save_and_reraise_exception(): [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] self.force_reraise() [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise self.value [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] updated_port = self._update_port( [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] _ensure_no_port_binding_failure(port) [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 863.616248] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] raise exception.PortBindingFailed(port_id=port['id']) [ 863.616589] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] nova.exception.PortBindingFailed: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. [ 863.616589] env[61594]: ERROR nova.compute.manager [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] [ 863.616589] env[61594]: DEBUG nova.compute.utils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 863.620133] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Build of instance b89e3f53-d9cd-400a-8ba9-83e328a59de8 was re-scheduled: Binding failed for port a230a46e-0d35-42de-8308-d37547cc8465, please check neutron logs for more information. {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 863.620133] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 863.620133] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquiring lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.647689] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 863.778374] env[61594]: DEBUG nova.network.neutron [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.779062] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] Expecting reply to msg 1c89a0c2a8be40a5808f7988eea210c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.794380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c89a0c2a8be40a5808f7988eea210c3 [ 863.795122] env[61594]: DEBUG oslo_concurrency.lockutils [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] Releasing lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.795444] env[61594]: DEBUG nova.compute.manager [req-ad0d33c8-fe72-47b8-b3b3-a5c976cc3735 req-ccadeda4-4585-4819-95b3-e7a061054af6 service nova] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Received event network-vif-deleted-a230a46e-0d35-42de-8308-d37547cc8465 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 863.795797] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Acquired lock "refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.795985] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 863.796714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg a3de0cbf186a4e52a311e080466b7fae in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 863.811587] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a3de0cbf186a4e52a311e080466b7fae [ 863.880420] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.100453] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.101729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 59fe1cd477cb412b89b0ba451f3e6555 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.114504] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59fe1cd477cb412b89b0ba451f3e6555 [ 864.116825] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-ead7b401-1fd8-49b8-958a-2537714f22ca" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.116825] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 864.116825] env[61594]: DEBUG nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 864.116825] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.174537] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.175181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 2ad0847849264adeab9f735c3994ac55 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.186332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ad0847849264adeab9f735c3994ac55 [ 864.186950] env[61594]: DEBUG nova.network.neutron [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.187605] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 64b9ea44502f42918974ac46e8b26210 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.202022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 64b9ea44502f42918974ac46e8b26210 [ 864.202022] env[61594]: INFO nova.compute.manager [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: ead7b401-1fd8-49b8-958a-2537714f22ca] Took 0.08 seconds to deallocate network for instance. [ 864.202022] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 0b40a52b8a574503a3d7cdbcd803d1e5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.253018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b40a52b8a574503a3d7cdbcd803d1e5 [ 864.253018] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 7ab2e0436d864e44b5729ca0d3b44e58 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.270032] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.270560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 89acd90284634adb9003ab590d3626e3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.284587] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89acd90284634adb9003ab590d3626e3 [ 864.285273] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Releasing lock 
"refresh_cache-b89e3f53-d9cd-400a-8ba9-83e328a59de8" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.285482] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 864.285707] env[61594]: DEBUG nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 864.285835] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 864.295162] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ab2e0436d864e44b5729ca0d3b44e58 [ 864.308308] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.309336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 3c5dcd9cfcff4c5789dab78240c2e806 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.318914] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c5dcd9cfcff4c5789dab78240c2e806 [ 864.319492] env[61594]: DEBUG nova.network.neutron [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.320058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 54f6452ffffe4a2da645c2c6ba8294c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.321527] env[61594]: INFO nova.scheduler.client.report [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance ead7b401-1fd8-49b8-958a-2537714f22ca [ 864.328902] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 63e6a35527644cd28c71ac0768ca57a3 in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.335368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 54f6452ffffe4a2da645c2c6ba8294c5 [ 864.335944] env[61594]: INFO nova.compute.manager [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] [instance: b89e3f53-d9cd-400a-8ba9-83e328a59de8] Took 0.05 seconds to deallocate network for instance. [ 864.337656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg f2bb27a3479040c7a77cfef5374e05a8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.344368] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 63e6a35527644cd28c71ac0768ca57a3 [ 864.344876] env[61594]: DEBUG oslo_concurrency.lockutils [None req-37f51081-b997-4174-8587-c095e18d99b7 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "ead7b401-1fd8-49b8-958a-2537714f22ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 9.088s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.422860] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2bb27a3479040c7a77cfef5374e05a8 [ 864.426646] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 751ab0cd05e24bf7b077916127f5f3f9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.464591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 751ab0cd05e24bf7b077916127f5f3f9 [ 864.491830] env[61594]: INFO nova.scheduler.client.report [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Deleted allocations for instance b89e3f53-d9cd-400a-8ba9-83e328a59de8 [ 864.500887] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Expecting reply to msg 5ccf610f56424cbeb70b3cd16a7377a1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.519565] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ccf610f56424cbeb70b3cd16a7377a1 [ 864.520229] env[61594]: DEBUG oslo_concurrency.lockutils [None req-ae466a58-1a89-456a-964b-4ed2dd49919a tempest-ServersTestBootFromVolume-630008723 tempest-ServersTestBootFromVolume-630008723-project-member] Lock "b89e3f53-d9cd-400a-8ba9-83e328a59de8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 10.710s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.542546] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquiring lock "0a85edf7-72d1-471b-b543-8a91bf585cba" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.542738] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "0a85edf7-72d1-471b-b543-8a91bf585cba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.543310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 040e548b401346be80af67e3fefcc8d9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.560281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 040e548b401346be80af67e3fefcc8d9 [ 864.562032] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 864.562841] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg fc53195ef1404e889eecfaddbe0973e8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.611880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fc53195ef1404e889eecfaddbe0973e8 [ 864.634389] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.634389] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 864.635514] env[61594]: INFO nova.compute.claims [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.637205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 28968ca1745b4e378804c3cd906e18e6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.674975] 
env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 28968ca1745b4e378804c3cd906e18e6 [ 864.677067] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 59c5cc4f0e1b4f46a78ada57ada76b77 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.690587] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 59c5cc4f0e1b4f46a78ada57ada76b77 [ 864.777440] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21c1ed6-b183-4633-bce7-65cb9c064406 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.788646] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273367a5-c817-4725-93cd-58ad19120446 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.820127] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e6627e-a0da-4c3f-894d-b0c1cce00d38 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.827894] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d89b0e0-7039-4b9b-99f2-f5cdd1784512 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.841565] env[61594]: DEBUG nova.compute.provider_tree [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.842021] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 0e26b86f7e9b49eb8ca2b66f400e9caa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.851787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0e26b86f7e9b49eb8ca2b66f400e9caa [ 864.852815] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 864.855210] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 
d4ffbebd07484a03860616c36e65c646 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.874765] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d4ffbebd07484a03860616c36e65c646 [ 864.875627] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.242s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 864.876117] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 864.878113] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg d30e1394749d40ebb7f19fa674e2fa69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.915519] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d30e1394749d40ebb7f19fa674e2fa69 [ 864.916953] env[61594]: DEBUG nova.compute.utils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 864.917562] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg c36b4eacb04247958209d87bb7671ebe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.919346] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 864.919535] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 864.931358] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c36b4eacb04247958209d87bb7671ebe [ 864.932122] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 864.933762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 6c128e6d6eba4a03acec910dc82eb834 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 864.975049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c128e6d6eba4a03acec910dc82eb834 [ 864.975957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 5135030c5ac141d6ad5cf35bd67d05ea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 865.014579] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5135030c5ac141d6ad5cf35bd67d05ea [ 865.015824] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 865.046966] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 865.046966] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 865.046966] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 865.047177] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 865.047207] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 
tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 865.047341] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 865.047587] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 865.047764] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 865.047933] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 865.048202] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 865.048395] env[61594]: DEBUG nova.virt.hardware [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 865.049642] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938667e1-e54a-40f3-bf09-873433eb2a92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.059016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6018ccc3-9938-4f5e-be42-9011c7e462e5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.162735] env[61594]: DEBUG nova.policy [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'caaa86bf3e484a109bb48f077f96a95c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51ed8d04e4d94b4289dedd35273775da', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 
'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 865.544704] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.544704] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Cleaning up deleted instances {{(pid=61594) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 865.545470] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 4b10524f12c340bd839fddf9fdd14969 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 865.559311] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b10524f12c340bd839fddf9fdd14969 [ 865.559311] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] There are 0 instances to clean {{(pid=61594) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 865.559531] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.559677] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Cleaning up deleted instances with incomplete migration {{(pid=61594) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 865.560025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg efbe5b8687d54ef8be8c462abd694d06 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 865.572648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg efbe5b8687d54ef8be8c462abd694d06 [ 865.573918] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 865.574242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 6d2841dfd4d74f2aa98585dae14c90ca in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 865.584992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d2841dfd4d74f2aa98585dae14c90ca [ 866.067261] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Successfully created port: ba385b5e-d417-4b48-aa68-2c36ff17e231 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.604287] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock 
"98cb1c0a-6fba-4e56-9211-6581d3eee790" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.604567] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "98cb1c0a-6fba-4e56-9211-6581d3eee790" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.604967] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 4c69de1d66c2402686a7c0510bb7de93 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.617056] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4c69de1d66c2402686a7c0510bb7de93 [ 866.617513] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 866.619169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 0a7c6cda7ab4453b916c81ac10b96ac3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.656061] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0a7c6cda7ab4453b916c81ac10b96ac3 [ 866.676768] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.677090] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 866.678820] env[61594]: INFO nova.compute.claims [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 866.680793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 29aac4fb87164e91953c5b934eaf9962 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.726960] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
29aac4fb87164e91953c5b934eaf9962 [ 866.728952] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 8d2d4a43ba054347916001aae8151521 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.738835] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d2d4a43ba054347916001aae8151521 [ 866.823416] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e738cf-e65f-4a59-9994-9ab880e017cd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.832371] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1918ff50-92f7-44b8-a43f-b5784363368b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.864838] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a150c35-31ab-46d4-8def-f03993eab76f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.872782] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02c37f8-05cf-4a5e-a229-efa3286a00bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.887163] env[61594]: DEBUG nova.compute.provider_tree [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.887695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c67a7f7e306341a2b7de419e0c106931 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.904956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c67a7f7e306341a2b7de419e0c106931 [ 866.906094] env[61594]: DEBUG nova.scheduler.client.report [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 866.908434] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cae46e8863a745ac887c64217070c0fa in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.927404] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
cae46e8863a745ac887c64217070c0fa [ 866.928468] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.251s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 866.929045] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 866.931316] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 95fd23dea3df4e99815f3ff70886203b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.963827] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 95fd23dea3df4e99815f3ff70886203b [ 866.965252] env[61594]: DEBUG nova.compute.utils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 866.965879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg e69bbc5e38da48e5a29f8c4fb38f6fdb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 866.966814] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 866.967025] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 866.980760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e69bbc5e38da48e5a29f8c4fb38f6fdb [ 866.981400] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 866.983052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ef2d029486d14c6cbe5249a30170a6e2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 867.011729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ef2d029486d14c6cbe5249a30170a6e2 [ 867.014717] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg c643da3a110a4e23812c95fd4565111a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 867.045070] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c643da3a110a4e23812c95fd4565111a [ 867.046494] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 867.069191] env[61594]: DEBUG nova.policy [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84f05a61e0f6499bb35c44d254226249', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bff3be1976444e58a2b7be93d47f50ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 867.075941] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 867.076298] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 867.076488] env[61594]: DEBUG nova.virt.hardware [None 
req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.076674] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 867.076817] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.076965] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 867.077185] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 867.077347] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 867.077513] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 867.077675] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 867.077846] env[61594]: DEBUG nova.virt.hardware [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.078725] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d267b1-d491-4a9a-aad9-265e5c710b0d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.088189] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c162e884-5191-44ed-8608-04e9fd12a44a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.580050] env[61594]: DEBUG 
oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.580286] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.580436] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 868.145813] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Successfully created port: 7a5102a3-39f6-4a6e-9f3a-634c590edb10 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.544750] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.316520] env[61594]: DEBUG nova.compute.manager [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Received event network-changed-ba385b5e-d417-4b48-aa68-2c36ff17e231 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 869.316785] env[61594]: DEBUG nova.compute.manager [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Refreshing instance network info cache due to event network-changed-ba385b5e-d417-4b48-aa68-2c36ff17e231. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 869.316925] env[61594]: DEBUG oslo_concurrency.lockutils [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] Acquiring lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.317169] env[61594]: DEBUG oslo_concurrency.lockutils [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] Acquired lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.317354] env[61594]: DEBUG nova.network.neutron [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Refreshing network info cache for port ba385b5e-d417-4b48-aa68-2c36ff17e231 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 869.317780] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] Expecting reply to msg 3e5c05b048fa4e5bb7d834146dabcf86 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 869.325218] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3e5c05b048fa4e5bb7d834146dabcf86 [ 869.356316] env[61594]: DEBUG nova.network.neutron [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.460406] env[61594]: DEBUG nova.network.neutron [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.461584] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] Expecting reply to msg 9f74310af54442a48e032a7c3a4d31cd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 869.475386] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9f74310af54442a48e032a7c3a4d31cd [ 869.476068] env[61594]: DEBUG oslo_concurrency.lockutils [req-d46b80ad-d60f-4cd7-b0f7-68c2108d20c8 req-a24f9f78-1c64-4920-ace5-560e7087bf56 service nova] Releasing lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.543906] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.005104] env[61594]: ERROR nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 
ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. [ 870.005104] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 870.005104] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 870.005104] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 870.005104] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 870.005104] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 870.005104] env[61594]: ERROR nova.compute.manager raise self.value [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 870.005104] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 870.005104] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 870.005104] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 870.005684] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 870.005684] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 870.005684] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. 
[ 870.005684] env[61594]: ERROR nova.compute.manager [ 870.005684] env[61594]: Traceback (most recent call last): [ 870.005684] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 870.005684] env[61594]: listener.cb(fileno) [ 870.005684] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 870.005684] env[61594]: result = function(*args, **kwargs) [ 870.005684] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 870.005684] env[61594]: return func(*args, **kwargs) [ 870.005684] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 870.005684] env[61594]: raise e [ 870.005684] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 870.005684] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 870.005684] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 870.005684] env[61594]: created_port_ids = self._update_ports_for_instance( [ 870.005684] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 870.005684] env[61594]: with excutils.save_and_reraise_exception(): [ 870.005684] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 870.005684] env[61594]: self.force_reraise() [ 870.005684] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 870.005684] env[61594]: raise self.value [ 870.005684] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 870.005684] env[61594]: updated_port = self._update_port( [ 870.005684] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 870.005684] env[61594]: _ensure_no_port_binding_failure(port) [ 870.005684] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 870.005684] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 870.006470] env[61594]: nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. [ 870.006470] env[61594]: Removing descriptor: 24 [ 870.006470] env[61594]: ERROR nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. 
[ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Traceback (most recent call last): [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] yield resources [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.driver.spawn(context, instance, image_meta, [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 870.006470] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] vm_ref = self.build_virtual_machine(instance, [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] vif_infos = vmwarevif.get_vif_info(self._session, [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] for vif in network_info: [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self._sync_wrapper(fn, *args, **kwargs) [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.wait() [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self[:] = self._gt.wait() [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self._exit_event.wait() [ 870.006807] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 870.007217] env[61594]: ERROR 
nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] result = hub.switch() [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self.greenlet.switch() [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] result = function(*args, **kwargs) [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return func(*args, **kwargs) [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise e [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] nwinfo = self.network_api.allocate_for_instance( [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 870.007217] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] created_port_ids = self._update_ports_for_instance( [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] with excutils.save_and_reraise_exception(): [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.force_reraise() [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise self.value [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] updated_port = self._update_port( [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 870.007643] 
env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] _ensure_no_port_binding_failure(port) [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 870.007643] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise exception.PortBindingFailed(port_id=port['id']) [ 870.007940] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. [ 870.007940] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] [ 870.007940] env[61594]: INFO nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Terminating instance [ 870.011918] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquiring lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.012431] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquired lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.012431] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 870.012696] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 004aa64029f341b0a98f9f196b930cde in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.027958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 004aa64029f341b0a98f9f196b930cde [ 870.246849] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.499650] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.500125] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 50fe746430854001a59acc3f1450437d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.509440] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 50fe746430854001a59acc3f1450437d [ 870.510166] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Releasing lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.510576] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 870.510796] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 870.511418] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7ebf3fa4-1e84-4965-9f1d-09cb6d7b089b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.521758] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7883dfb-92f4-43ba-bb1a-178b9ab43291 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.547727] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.547911] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 870.548023] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 870.549069] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 
8295061b624f4db48df812746f46dd06 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.550201] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0a85edf7-72d1-471b-b543-8a91bf585cba could not be found. [ 870.550289] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 870.550405] env[61594]: INFO nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Took 0.04 seconds to destroy the instance on the hypervisor. [ 870.551313] env[61594]: DEBUG oslo.service.loopingcall [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 870.551413] env[61594]: DEBUG nova.compute.manager [-] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 870.551509] env[61594]: DEBUG nova.network.neutron [-] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 870.569518] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8295061b624f4db48df812746f46dd06 [ 870.571317] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 870.571460] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 870.571622] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 870.571762] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 870.571882] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 870.572019] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 870.573389] env[61594]: DEBUG nova.network.neutron [-] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.575629] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg b0d426cabc1c40639823a2adf6d848e5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.575629] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.575629] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.575629] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 870.576052] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f293f8c9721e4e0198f09c28a422a841 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.582570] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0d426cabc1c40639823a2adf6d848e5 [ 870.582994] env[61594]: DEBUG nova.network.neutron [-] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.583420] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg f0026ed699324b70a8c9ee742a4df525 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.591921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f293f8c9721e4e0198f09c28a422a841 [ 870.592461] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f0026ed699324b70a8c9ee742a4df525 [ 870.593468] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.593690] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.596020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.596020] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 870.596020] env[61594]: INFO nova.compute.manager [-] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Took 0.04 seconds to deallocate network for instance. [ 870.596375] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c72df95b-01a1-4734-8930-bd5557906663 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.601785] env[61594]: DEBUG nova.compute.claims [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 870.602042] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.602399] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.604946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 9d2b02e67c464d6e80126fc6c06a5ce1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.613255] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffd560b-43a4-48d1-a470-d834591ba471 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.632280] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ebbf20-b7c7-4abb-abbd-889fea67c91f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.637011] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ea0abf86-472e-4d7e-b378-dcbe904fee33 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.669652] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d2b02e67c464d6e80126fc6c06a5ce1 [ 870.670218] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181484MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 870.670367] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.768057] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Refreshing inventories for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 870.790822] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Updating ProviderTree inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 870.790822] env[61594]: DEBUG nova.compute.provider_tree [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Updating inventory in ProviderTree for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.807850] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Refreshing aggregate associations for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be, aggregates: None {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 870.827056] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 
tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Refreshing trait associations for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 870.919763] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b27005-da64-4af6-9a15-ad2b45349cc7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.927400] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a99fdb-a881-4383-9894-7389f00d6000 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.957745] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a327e1-54be-465b-bd9a-2f4818c66429 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.965593] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c1dd3f7-6cde-4b88-8b41-1946596c0167 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.980128] env[61594]: DEBUG nova.compute.provider_tree [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 870.980683] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 80e0371541d84853a1f67cc3a91c7115 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 870.989530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 80e0371541d84853a1f67cc3a91c7115 [ 870.990521] env[61594]: DEBUG nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 870.993497] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 5306376d52a94975bd8435e976782903 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.014834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5306376d52a94975bd8435e976782903 [ 
871.015729] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.413s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.016418] env[61594]: ERROR nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Traceback (most recent call last): [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.driver.spawn(context, instance, image_meta, [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] vm_ref = self.build_virtual_machine(instance, [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] vif_infos = vmwarevif.get_vif_info(self._session, [ 871.016418] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] for vif in network_info: [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self._sync_wrapper(fn, *args, **kwargs) [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.wait() [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] 
self[:] = self._gt.wait() [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self._exit_event.wait() [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] result = hub.switch() [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 871.016746] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return self.greenlet.switch() [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] result = function(*args, **kwargs) [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] return func(*args, **kwargs) [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise e [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] nwinfo = self.network_api.allocate_for_instance( [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] created_port_ids = self._update_ports_for_instance( [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] with excutils.save_and_reraise_exception(): [ 871.017132] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] self.force_reraise() [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.017424] env[61594]: ERROR 
nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise self.value [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] updated_port = self._update_port( [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] _ensure_no_port_binding_failure(port) [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] raise exception.PortBindingFailed(port_id=port['id']) [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] nova.exception.PortBindingFailed: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. [ 871.017424] env[61594]: ERROR nova.compute.manager [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] [ 871.017665] env[61594]: DEBUG nova.compute.utils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 871.018698] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.348s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.019106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f35ac236c10146b18c10f01b6d0c3294 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.020646] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Build of instance 0a85edf7-72d1-471b-b543-8a91bf585cba was re-scheduled: Binding failed for port ba385b5e-d417-4b48-aa68-2c36ff17e231, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 871.020646] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 871.024023] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquiring lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.024023] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Acquired lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.024023] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 871.024023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 0c6360805a1e44f3bca15963ca1188d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.030477] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0c6360805a1e44f3bca15963ca1188d8 [ 871.048755] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f35ac236c10146b18c10f01b6d0c3294 [ 871.052169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ed71b888d95b4373b0e1beecbf58a9d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.066530] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.069152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ed71b888d95b4373b0e1beecbf58a9d3 [ 871.088723] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance d364fc2e-89d7-4b2e-a510-19148a8f1a2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 871.088882] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance dc31ebf5-889b-438b-9f54-6df807714a38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 871.089230] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0f6368a9-cadc-46b4-be16-017724580876 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 871.089575] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 1983f1e344474435a814190613535e36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.101160] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1983f1e344474435a814190613535e36 [ 871.101960] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0a85edf7-72d1-471b-b543-8a91bf585cba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 871.102144] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 98cb1c0a-6fba-4e56-9211-6581d3eee790 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 871.102404] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 871.102673] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 871.191942] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72506929-4034-492e-8804-7966f673101a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.201038] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7370257c-a9f8-47c8-9bd0-bc7ea0421d98 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.240255] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e5caa9-8a1c-4e1c-b310-f732581467bb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.249786] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6908d0-3b16-4925-beee-63568cabb570 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.265471] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.266033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg b075691768db436ca6cd19836df11fbc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.275968] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b075691768db436ca6cd19836df11fbc [ 871.276945] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 871.279342] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 2f85dfbb8c32490090bc50d13b6c26b3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.300950] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2f85dfbb8c32490090bc50d13b6c26b3 [ 871.302242] 
env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.302285] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.284s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.400273] env[61594]: ERROR nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. [ 871.400273] env[61594]: ERROR nova.compute.manager Traceback (most recent call last): [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 871.400273] env[61594]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.400273] env[61594]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.400273] env[61594]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.400273] env[61594]: ERROR nova.compute.manager self.force_reraise() [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.400273] env[61594]: ERROR nova.compute.manager raise self.value [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.400273] env[61594]: ERROR nova.compute.manager updated_port = self._update_port( [ 871.400273] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.400273] env[61594]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 871.400713] env[61594]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 871.400713] env[61594]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 871.400713] env[61594]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. 
[ 871.400713] env[61594]: ERROR nova.compute.manager [ 871.400713] env[61594]: Traceback (most recent call last): [ 871.400713] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 871.400713] env[61594]: listener.cb(fileno) [ 871.400713] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 871.400713] env[61594]: result = function(*args, **kwargs) [ 871.400713] env[61594]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 871.400713] env[61594]: return func(*args, **kwargs) [ 871.400713] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 871.400713] env[61594]: raise e [ 871.400713] env[61594]: File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 871.400713] env[61594]: nwinfo = self.network_api.allocate_for_instance( [ 871.400713] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.400713] env[61594]: created_port_ids = self._update_ports_for_instance( [ 871.400713] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.400713] env[61594]: with excutils.save_and_reraise_exception(): [ 871.400713] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.400713] env[61594]: self.force_reraise() [ 871.400713] env[61594]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.400713] env[61594]: raise self.value [ 871.400713] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.400713] env[61594]: updated_port = self._update_port( [ 871.400713] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.400713] env[61594]: _ensure_no_port_binding_failure(port) [ 871.400713] env[61594]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 871.400713] env[61594]: raise exception.PortBindingFailed(port_id=port['id']) [ 871.401352] env[61594]: nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. [ 871.401352] env[61594]: Removing descriptor: 22 [ 871.401352] env[61594]: ERROR nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. 
[ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Traceback (most recent call last): [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] yield resources [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.driver.spawn(context, instance, image_meta, [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self._vmops.spawn(context, instance, image_meta, injected_files, [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 871.401352] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] vm_ref = self.build_virtual_machine(instance, [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] vif_infos = vmwarevif.get_vif_info(self._session, [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] for vif in network_info: [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self._sync_wrapper(fn, *args, **kwargs) [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.wait() [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self[:] = self._gt.wait() [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self._exit_event.wait() [ 871.401646] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 871.401945] env[61594]: ERROR 
nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] result = hub.switch() [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self.greenlet.switch() [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] result = function(*args, **kwargs) [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return func(*args, **kwargs) [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise e [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] nwinfo = self.network_api.allocate_for_instance( [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.401945] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] created_port_ids = self._update_ports_for_instance( [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] with excutils.save_and_reraise_exception(): [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.force_reraise() [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise self.value [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] updated_port = self._update_port( [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.402254] 
env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] _ensure_no_port_binding_failure(port) [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 871.402254] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise exception.PortBindingFailed(port_id=port['id']) [ 871.402534] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. [ 871.402534] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] [ 871.402534] env[61594]: INFO nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Terminating instance [ 871.405886] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.405983] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.406170] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 871.406618] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 3101195fba2d45688096cb01a604ffb6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.408851] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.409316] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 601c721ff07d475f95418f6df658b450 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.418233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3101195fba2d45688096cb01a604ffb6 [ 871.425909] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 601c721ff07d475f95418f6df658b450 [ 871.426488] env[61594]: 
DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Releasing lock "refresh_cache-0a85edf7-72d1-471b-b543-8a91bf585cba" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.426688] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 871.426920] env[61594]: DEBUG nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 871.428099] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 871.441439] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.461316] env[61594]: DEBUG nova.compute.manager [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Received event network-changed-7a5102a3-39f6-4a6e-9f3a-634c590edb10 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 871.461535] env[61594]: DEBUG nova.compute.manager [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Refreshing instance network info cache due to event network-changed-7a5102a3-39f6-4a6e-9f3a-634c590edb10. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 871.461770] env[61594]: DEBUG oslo_concurrency.lockutils [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] Acquiring lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.463114] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.463396] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 23639e3e7ab34fca8dca521153f1bb00 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.477711] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 23639e3e7ab34fca8dca521153f1bb00 [ 871.477711] env[61594]: DEBUG nova.network.neutron [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.477900] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg c434fba43e3e44778faf6c74f361d413 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.490097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c434fba43e3e44778faf6c74f361d413 [ 871.490768] env[61594]: INFO nova.compute.manager [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] [instance: 0a85edf7-72d1-471b-b543-8a91bf585cba] Took 0.06 seconds to deallocate network for instance. [ 871.492618] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg b73f6f3e8f32495388a32e392ab6ea2b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.546373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b73f6f3e8f32495388a32e392ab6ea2b [ 871.549294] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg 1eb62381ee12458b945605810e29c947 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.601301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1eb62381ee12458b945605810e29c947 [ 871.605437] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.605937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 9faff963984449f0bd25440575c794dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.614972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9faff963984449f0bd25440575c794dd [ 871.615535] env[61594]: DEBUG 
oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.615931] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 871.616147] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 871.616484] env[61594]: DEBUG oslo_concurrency.lockutils [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] Acquired lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.616770] env[61594]: DEBUG nova.network.neutron [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Refreshing network info cache for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 871.617068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] Expecting reply to msg 146bf7c71b6a44328455e5f8788990d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.618636] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7107e49-dcf5-440d-8eec-e8190de1a31e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.630263] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b07ec-36bc-4ddb-9b36-1650e41708b5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.643020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 146bf7c71b6a44328455e5f8788990d8 [ 871.644105] env[61594]: INFO nova.scheduler.client.report [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Deleted allocations for instance 0a85edf7-72d1-471b-b543-8a91bf585cba [ 871.651698] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Expecting reply to msg db662e9453504780af2ff93471926d5d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.665689] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] 
Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98cb1c0a-6fba-4e56-9211-6581d3eee790 could not be found. [ 871.665918] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 871.666112] env[61594]: INFO nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Took 0.05 seconds to destroy the instance on the hypervisor. [ 871.666372] env[61594]: DEBUG oslo.service.loopingcall [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 871.666878] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db662e9453504780af2ff93471926d5d [ 871.667246] env[61594]: DEBUG nova.compute.manager [-] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 871.667341] env[61594]: DEBUG nova.network.neutron [-] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 871.669091] env[61594]: DEBUG oslo_concurrency.lockutils [None req-13371d3e-b164-4012-b292-aa3a66806598 tempest-AttachInterfacesUnderV243Test-1251072297 tempest-AttachInterfacesUnderV243Test-1251072297-project-member] Lock "0a85edf7-72d1-471b-b543-8a91bf585cba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.126s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.674413] env[61594]: DEBUG nova.network.neutron [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.691223] env[61594]: DEBUG nova.network.neutron [-] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 871.691760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4bf45c8d81c14daa8cd96fea4fd6f352 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.702450] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bf45c8d81c14daa8cd96fea4fd6f352 [ 871.702846] env[61594]: DEBUG nova.network.neutron [-] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.703263] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 4b5154946ead4b44b6d01ea0cc6dd231 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.719111] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4b5154946ead4b44b6d01ea0cc6dd231 [ 871.719111] env[61594]: INFO nova.compute.manager [-] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Took 0.05 seconds to deallocate network for instance. [ 871.720667] env[61594]: DEBUG nova.compute.claims [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 871.720667] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.720898] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.722789] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg db4da95fa82247098436e729c0f73a20 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.772716] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg db4da95fa82247098436e729c0f73a20 [ 871.824857] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.825484] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.825622] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg d82453c2130a49da9a3362dca399c772 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.835609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d82453c2130a49da9a3362dca399c772 [ 871.836018] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 871.837742] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg a605f13537c442c2ae548aa9e0ee0123 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.849864] env[61594]: DEBUG nova.network.neutron [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.850837] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] Expecting reply to msg fae8e6505bca45e5bf9278f69c7fb15c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.863084] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fae8e6505bca45e5bf9278f69c7fb15c [ 871.863622] env[61594]: DEBUG oslo_concurrency.lockutils [req-8d9ca9c0-2ddf-4448-83f7-1cee2398c99b req-00869925-b3ea-4415-b7b8-3955a8037694 service nova] Releasing lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.872526] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a605f13537c442c2ae548aa9e0ee0123 [ 871.876785] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6ea72c-bc2b-41c2-b651-28ca84c1d5d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.888124] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcee93d-7dc5-4c6a-8795-aef76ff93fba {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.891912] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.921531] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0336be2-55d4-4188-942d-9e1ab3e466c2 {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.929257] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca19fe24-4ab7-410e-89b0-92adff4dc17f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.942998] env[61594]: DEBUG nova.compute.provider_tree [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.943530] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 9d8f0c4a062340478271940c70ffe20a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.952323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9d8f0c4a062340478271940c70ffe20a [ 871.953292] env[61594]: DEBUG nova.scheduler.client.report [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 871.955812] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg d003aba26c3547019aadc11e4bf4f7dc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.967075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d003aba26c3547019aadc11e4bf4f7dc [ 871.968251] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.247s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.969227] env[61594]: ERROR nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. 
[ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Traceback (most recent call last): [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.driver.spawn(context, instance, image_meta, [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self._vmops.spawn(context, instance, image_meta, injected_files, [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] vm_ref = self.build_virtual_machine(instance, [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] vif_infos = vmwarevif.get_vif_info(self._session, [ 871.969227] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] for vif in network_info: [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self._sync_wrapper(fn, *args, **kwargs) [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.wait() [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self[:] = self._gt.wait() [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 224, in wait [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self._exit_event.wait() [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] result = hub.switch() [ 871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
871.969557] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return self.greenlet.switch() [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 264, in main [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] result = function(*args, **kwargs) [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] return func(*args, **kwargs) [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 2002, in _allocate_network_async [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise e [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/compute/manager.py", line 1980, in _allocate_network_async [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] nwinfo = self.network_api.allocate_for_instance( [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] created_port_ids = self._update_ports_for_instance( [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] with excutils.save_and_reraise_exception(): [ 871.970033] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] self.force_reraise() [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise self.value [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] updated_port = self._update_port( [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] _ensure_no_port_binding_failure(port) [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] raise exception.PortBindingFailed(port_id=port['id']) [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] nova.exception.PortBindingFailed: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. [ 871.970370] env[61594]: ERROR nova.compute.manager [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] [ 871.970682] env[61594]: DEBUG nova.compute.utils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 871.972028] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.080s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.973468] env[61594]: INFO nova.compute.claims [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.975060] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 8410dc1e4c594387a7e0a00eb7352206 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.976645] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Build of instance 98cb1c0a-6fba-4e56-9211-6581d3eee790 was re-scheduled: Binding failed for port 7a5102a3-39f6-4a6e-9f3a-634c590edb10, please check neutron logs for more information. 
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 871.977082] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 871.977229] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquiring lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.977375] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Acquired lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.977531] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 871.978033] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg ca22609bc0524c9f9dbf61f2c7562778 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 871.984592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ca22609bc0524c9f9dbf61f2c7562778 [ 872.003178] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 872.016252] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8410dc1e4c594387a7e0a00eb7352206 [ 872.018282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 4382f0f906af4a6ab280bb25dfa1ba80 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.024957] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4382f0f906af4a6ab280bb25dfa1ba80 [ 872.101254] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7b23b2-dff5-4abe-877e-413b4a096dca {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.108624] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.109130] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 910a4a1f84be4cdfb9764f13e46affb5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.111104] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6e8b39-82b2-4a6d-9ec6-3fd951973eb0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.141493] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 910a4a1f84be4cdfb9764f13e46affb5 [ 872.142394] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370bf2d5-715a-45ae-a182-f931ddaa0406 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.145105] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Releasing lock "refresh_cache-98cb1c0a-6fba-4e56-9211-6581d3eee790" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.145325] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 872.145512] env[61594]: DEBUG nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 872.145679] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 872.152708] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ed38a4-ab6d-407d-9264-2b40e3d4339f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.166202] env[61594]: DEBUG nova.compute.provider_tree [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 872.166682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 5084db991a76498eb7a781bf2004cdb8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.168648] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 872.169163] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg b4478637789d476d9e689a7f11601370 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.174508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5084db991a76498eb7a781bf2004cdb8 [ 872.175218] env[61594]: DEBUG nova.scheduler.client.report [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 872.177624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 61c117b87e524a9da185f5608f6eb368 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.178808] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b4478637789d476d9e689a7f11601370 [ 872.179227] env[61594]: DEBUG nova.network.neutron [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.179610] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 22bd028ed7bc4fe488a860b8b4bf39d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.188669] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22bd028ed7bc4fe488a860b8b4bf39d7 [ 872.189210] env[61594]: INFO nova.compute.manager [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] [instance: 98cb1c0a-6fba-4e56-9211-6581d3eee790] Took 0.04 seconds to deallocate network for instance. 
[ 872.190882] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 86de492cd4b34b57bdf1aac11cbf4069 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.192408] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 61c117b87e524a9da185f5608f6eb368 [ 872.193222] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.221s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.193688] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 872.195323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 8b338ce303c342c0a6cbb9054dfa92c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.220811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 86de492cd4b34b57bdf1aac11cbf4069 [ 872.223512] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg cb640f88befc4b4592a42165ff323b69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.225451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b338ce303c342c0a6cbb9054dfa92c9 [ 872.226595] env[61594]: DEBUG nova.compute.utils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.227192] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 07a08773565241c38d55f39dc5a13749 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.228364] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 872.228645] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 872.237710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 07a08773565241c38d55f39dc5a13749 [ 872.238237] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 872.239772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 5e6c536ad4724ff38d4d043cf5316a39 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.270491] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.271888] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb640f88befc4b4592a42165ff323b69 [ 872.282547] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5e6c536ad4724ff38d4d043cf5316a39 [ 872.285431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 79bd5e46e3374b2b9166bc4a9ba2a886 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.288500] env[61594]: DEBUG nova.policy [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40821b3f2b084dcca46c911428a61ced', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd11c0741810b4ebe8fd2839f38dcdf4d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 872.291434] env[61594]: INFO nova.scheduler.client.report [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Deleted allocations for instance 98cb1c0a-6fba-4e56-9211-6581d3eee790 [ 872.297173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Expecting reply to msg 4a58879c3d1c4ac28c49eba3709489b1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 872.324819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
4a58879c3d1c4ac28c49eba3709489b1 [ 872.324819] env[61594]: DEBUG oslo_concurrency.lockutils [None req-498aebc2-92d3-492c-b157-18d89d0f6de4 tempest-DeleteServersTestJSON-215265594 tempest-DeleteServersTestJSON-215265594-project-member] Lock "98cb1c0a-6fba-4e56-9211-6581d3eee790" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 5.720s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.328102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 79bd5e46e3374b2b9166bc4a9ba2a886 [ 872.328786] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 872.355232] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 872.355515] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 872.355685] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.355930] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 872.356134] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.356307] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 872.356698] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 872.356769] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 872.356968] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 872.357160] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 872.357338] env[61594]: DEBUG nova.virt.hardware [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 872.359020] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58455154-2962-4829-b11d-8411115e53e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.367224] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ad38ec-f4c3-4b94-8b24-6dad0877bdbf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.653686] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Successfully created port: b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 873.594225] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Successfully updated port: b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.595299] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 082036523e344eebb3d4b052016cf48b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 873.612293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC 
response for msg 082036523e344eebb3d4b052016cf48b [ 873.613107] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.613348] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquired lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.613540] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 873.614105] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg b0ca48209b6d48efba0ef3c0e6454c32 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 873.626310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b0ca48209b6d48efba0ef3c0e6454c32 [ 873.663387] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 873.850694] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Updating instance_info_cache with network_info: [{"id": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "address": "fa:16:3e:81:87:3d", "network": {"id": "ae0d06cd-78fe-4e69-9e26-40f1a8dce15c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1200049902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d11c0741810b4ebe8fd2839f38dcdf4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6d084c9-cd", "ovs_interfaceid": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.851205] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg e9397026ceb1441fb1e2e8e1ff75b82e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 873.867234] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9397026ceb1441fb1e2e8e1ff75b82e [ 873.867860] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Releasing lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.868156] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Instance network_info: |[{"id": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "address": "fa:16:3e:81:87:3d", "network": {"id": "ae0d06cd-78fe-4e69-9e26-40f1a8dce15c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1200049902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d11c0741810b4ebe8fd2839f38dcdf4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", 
"segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6d084c9-cd", "ovs_interfaceid": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 873.868618] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:87:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4db2961d-273d-4634-9d06-a94fa9d384fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6d084c9-cd60-4c3f-808e-e17582fbd223', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 873.877228] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Creating folder: Project (d11c0741810b4ebe8fd2839f38dcdf4d). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 873.877760] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06534bea-899f-44dd-b8dd-d873d9ebca92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.888374] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Created folder: Project (d11c0741810b4ebe8fd2839f38dcdf4d) in parent group-v277030. [ 873.888556] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Creating folder: Instances. Parent ref: group-v277058. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 873.888779] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-169c334e-6b5d-4b64-ac96-815ddc10f54b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.898363] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Created folder: Instances in parent group-v277058. [ 873.898597] env[61594]: DEBUG oslo.service.loopingcall [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 873.898779] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 873.898968] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d31c26f-482f-466e-9be6-cca61e7c65f0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.921794] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.921794] env[61594]: value = "task-1291430" [ 873.921794] env[61594]: _type = "Task" [ 873.921794] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.926507] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291430, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.428939] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291430, 'name': CreateVM_Task, 'duration_secs': 0.44167} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.429145] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 874.436417] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.436598] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.436919] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 874.437191] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0962db5d-f863-4ffc-994b-4feb834a6751 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.441847] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Waiting for the task: (returnval){ [ 874.441847] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52c5135b-db72-7055-2be1-b3d5c4572238" [ 874.441847] env[61594]: _type = "Task" [ 874.441847] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.449539] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52c5135b-db72-7055-2be1-b3d5c4572238, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.951979] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.952268] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.952458] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.521730] env[61594]: DEBUG nova.compute.manager [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Received event network-vif-plugged-b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 875.521857] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Acquiring lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 875.522373] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.524305] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.524305] env[61594]: DEBUG nova.compute.manager [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] No waiting events 
found dispatching network-vif-plugged-b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 875.524305] env[61594]: WARNING nova.compute.manager [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Received unexpected event network-vif-plugged-b6d084c9-cd60-4c3f-808e-e17582fbd223 for instance with vm_state building and task_state spawning. [ 875.524305] env[61594]: DEBUG nova.compute.manager [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Received event network-changed-b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 875.524560] env[61594]: DEBUG nova.compute.manager [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Refreshing instance network info cache due to event network-changed-b6d084c9-cd60-4c3f-808e-e17582fbd223. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 875.524560] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Acquiring lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.524560] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Acquired lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.524560] env[61594]: DEBUG nova.network.neutron [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Refreshing network info cache for port b6d084c9-cd60-4c3f-808e-e17582fbd223 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 875.524560] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Expecting reply to msg 35865a98fbb342a1b451b6bcfecf7039 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 875.535099] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 35865a98fbb342a1b451b6bcfecf7039 [ 876.161635] env[61594]: DEBUG nova.network.neutron [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Updated VIF entry in instance network info cache for port b6d084c9-cd60-4c3f-808e-e17582fbd223. 
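
The Acquiring/Acquired/Releasing lock lines above, both for the datastore image-cache path and for refresh_cache-<instance>, come from oslo.concurrency's lockutils, which serialises work on a named lock. A small sketch of the same pattern follows; the lock-name format is taken from the log, while refresh_network_cache() is a placeholder for whatever work has to run while the lock is held.

    from oslo_concurrency import lockutils

    def refresh_cache_locked(instance_uuid, refresh_network_cache):
        lock_name = "refresh_cache-%s" % instance_uuid
        # lockutils.lock() returns a context manager; the "Acquiring"/
        # "Acquired"/"Releasing" DEBUG lines in the log bracket exactly
        # this kind of block.
        with lockutils.lock(lock_name):
            refresh_network_cache(instance_uuid)
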
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 876.162077] env[61594]: DEBUG nova.network.neutron [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Updating instance_info_cache with network_info: [{"id": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "address": "fa:16:3e:81:87:3d", "network": {"id": "ae0d06cd-78fe-4e69-9e26-40f1a8dce15c", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1200049902-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "d11c0741810b4ebe8fd2839f38dcdf4d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4db2961d-273d-4634-9d06-a94fa9d384fb", "external-id": "nsx-vlan-transportzone-572", "segmentation_id": 572, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6d084c9-cd", "ovs_interfaceid": "b6d084c9-cd60-4c3f-808e-e17582fbd223", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.162547] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Expecting reply to msg 8b1db1628ee143999ac6af78c17a4f8d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 876.172250] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8b1db1628ee143999ac6af78c17a4f8d [ 876.172851] env[61594]: DEBUG oslo_concurrency.lockutils [req-e4cb28c5-ebc5-445e-a711-9a6e22db3598 req-383d5e4d-53d6-4ca3-ac25-9d02947ac6ce service nova] Releasing lock "refresh_cache-23641d1e-bbca-4887-95c6-5a6cac0ce6a1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.033498] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.033853] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.034594] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg d441116c06794a76a772525976e6486d in queue 
reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.047863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d441116c06794a76a772525976e6486d [ 880.048347] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 880.050613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f5dff335ee3642a88dcbdf60a293646b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.083558] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "299afd65-10d4-4602-9a7e-b5d12e88a823" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.083795] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.084273] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg d633893724e74177954c8f3c429256db in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.095093] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d633893724e74177954c8f3c429256db [ 880.095537] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Starting instance... 
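
The instance_info_cache update above stores network_info as a JSON list of VIF dictionaries (id, address, network.subnets[].ips, devname, and so on). The helper below is a small illustrative parser for a blob shaped like the one logged; the field names are copied from the log, but the function itself is not part of Nova.

    import json

    def summarize_network_info(network_info_json):
        """Return (port_id, mac, devname, fixed_ips) tuples from a cached blob."""
        summary = []
        for vif in json.loads(network_info_json):
            fixed_ips = [ip["address"]
                         for subnet in vif["network"]["subnets"]
                         for ip in subnet["ips"]]
            summary.append((vif["id"], vif["address"], vif.get("devname"), fixed_ips))
        return summary

    # For the cache entry above this yields:
    # [('b6d084c9-cd60-4c3f-808e-e17582fbd223', 'fa:16:3e:81:87:3d',
    #   'tapb6d084c9-cd', ['192.168.128.9'])]
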
{{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 880.097290] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 1e17da9a6666437ba3e7a2280bc17857 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.098385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f5dff335ee3642a88dcbdf60a293646b [ 880.118354] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.118596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.120402] env[61594]: INFO nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.122127] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 1ae371e15f4848fc91af4103d8efd1c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.159027] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1ae371e15f4848fc91af4103d8efd1c9 [ 880.159489] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e17da9a6666437ba3e7a2280bc17857 [ 880.161407] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 30c51924b1974fcb9ea8103d446254d8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.175757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30c51924b1974fcb9ea8103d446254d8 [ 880.182180] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "1ac59594-4fbc-4a99-9e73-657185d4f218" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.182402] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.182843] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 6e358d43af5a42209bbfbaa40cb15855 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.184580] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.194542] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e358d43af5a42209bbfbaa40cb15855 [ 880.194999] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 880.196603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 969288427e6a407bbdd98a6a3ae32809 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.235836] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 969288427e6a407bbdd98a6a3ae32809 [ 880.268922] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 880.313072] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19fddac-0f89-4b0b-9434-6149325d02e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.319443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231e5e98-79ba-43d8-8b00-517276161b0a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.355502] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9ceec3-d788-4ef0-bfae-664a5b297489 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.363359] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9b45fd-0e36-4052-a963-e52a38afcfef {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.378844] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f 
tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.379380] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 38ccb189fe2a47ecb6fb8592ba860851 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.389520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 38ccb189fe2a47ecb6fb8592ba860851 [ 880.390678] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 880.394123] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 145c4185cca7411ab182efa5ec00457e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.405744] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 145c4185cca7411ab182efa5ec00457e [ 880.406565] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.288s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.407088] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Start building networks asynchronously for instance. 
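
The "Inventory has not changed" report above carries the provider's capacity as total, reserved and allocation_ratio per resource class. Placement treats usable capacity as (total - reserved) * allocation_ratio, so the figures logged work out as follows (values copied from the log line; the helper itself is only illustrative):

    INVENTORY = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def usable_capacity(inventory):
        # usable = (total - reserved) * allocation_ratio, per resource class
        return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
                for rc, v in inventory.items()}

    print(usable_capacity(INVENTORY))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
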
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 880.409335] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 4e81cdc3d7ff402b866111bccc21fa2e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.410254] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.226s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.411795] env[61594]: INFO nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.413326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 6ca1b025ecc04457a125abe6dd65b8ff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.454466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca1b025ecc04457a125abe6dd65b8ff [ 880.455065] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4e81cdc3d7ff402b866111bccc21fa2e [ 880.456871] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 98a19c4fff4b4a8e843341b242347f7c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.459666] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.460497] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg cfcd6a72b97343a9911d0b39d2f63cbe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.462730] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 880.462997] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.467498] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 98a19c4fff4b4a8e843341b242347f7c [ 880.473059] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cfcd6a72b97343a9911d0b39d2f63cbe [ 880.474897] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 880.476921] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 8adcd6455192405abcb41e612ed7bd80 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.536792] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8adcd6455192405abcb41e612ed7bd80 [ 880.539581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 506b07c0bd1746e9926ce580b08ec970 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.553230] env[61594]: DEBUG nova.policy [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '474d41dd1c6f42a6bdfa525e522b2d38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c837b52a6dc84bda829c0012a3537199', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 880.575228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 506b07c0bd1746e9926ce580b08ec970 [ 880.576486] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Start spawning the instance on the hypervisor. 
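
The nova.policy DEBUG line above shows a check of network:attach_external_network being evaluated against the request's credentials and failing, because the token carries only the member and reader roles. The snippet below is a stand-alone illustration of that kind of role test, not Nova's actual oslo.policy wiring; treating the rule as admin-only reflects Nova's usual default for this policy but is stated here as an assumption.

    # Credentials dict shape copied (trimmed) from the DEBUG line above.
    creds = {
        "is_admin": False,
        "roles": ["member", "reader"],
        "project_id": "c837b52a6dc84bda829c0012a3537199",
    }

    def may_attach_external_network(credentials):
        # Assumed rule: attaching directly to an external network is admin-only,
        # so a member/reader token fails the check and Nova simply skips
        # external networks when allocating ports for the instance.
        return credentials.get("is_admin", False) or "admin" in credentials.get("roles", [])

    print(may_attach_external_network(creds))   # False, matching the "failed" DEBUG line
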
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 880.609893] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.610318] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.610318] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.610477] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.610622] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.610809] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.611234] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.611927] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 880.612398] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.612631] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.612873] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.614443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edebe2ef-632d-48af-a6a4-60e3d0323ff4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.619704] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868252e3-2baf-45f4-af67-033baab86a40 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.629660] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61da612-d2fb-405f-a701-483e20ad61c4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.633803] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce973370-fccc-4ad5-b34d-cdf5ed65715d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.674913] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52b72d0-95d7-4814-bd98-8a918c19b386 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.683377] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc2f0a7d-1779-414b-9d78-5a6cce994862 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.698301] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.698940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 2a1bcfc9932d446b811ac71bd9733a44 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.712819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
2a1bcfc9932d446b811ac71bd9733a44 [ 880.713842] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 880.716562] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 4bc6ccabaa2145e69dd9cd6ba2eb0980 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.742528] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bc6ccabaa2145e69dd9cd6ba2eb0980 [ 880.743540] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.333s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.744093] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Start building networks asynchronously for instance. 
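
The nova.virt.hardware lines above enumerate candidate CPU topologies for the one-vCPU m1.nano flavor: with no flavor or image limits, the only (sockets, cores, threads) triple whose product equals 1 is (1, 1, 1), hence "Got 1 possible topologies". A simplified sketch of that enumeration, not Nova's exact algorithm, is:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """List (sockets, cores, threads) triples whose product equals vcpus."""
        found = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets > max_sockets or cores > max_cores or threads > max_threads:
                continue  # respect per-dimension limits
            found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology in the log
    print(possible_topologies(4))   # several candidates, e.g. (1, 2, 2) and (4, 1, 1)
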
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 880.745793] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg ec17cd5371d64a1b8d3fab4961d70799 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.746730] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.478s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.751031] env[61594]: INFO nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 880.751031] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 390324e6fbd34bd4ae4086351bad6f59 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.795648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ec17cd5371d64a1b8d3fab4961d70799 [ 880.795648] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 880.795880] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg c07a5bcb318440da8040bdca40fd9603 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.796713] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Allocating IP information in the background. 
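
The "Claim successful on node ..." entries and the surrounding compute_resources lock lines come from the resource tracker: every claim is serialised on one process-wide lock so that the concurrent builds in this run cannot over-commit the host. A toy version of that pattern, using oslo.concurrency's synchronized decorator with the same lock name, is sketched below; the accounting is reduced to vCPUs only.

    from oslo_concurrency import lockutils

    class ToyResourceTracker:
        def __init__(self, vcpus_total):
            self.vcpus_total = vcpus_total
            self.vcpus_used = 0

        @lockutils.synchronized("compute_resources")
        def instance_claim(self, vcpus):
            # Only one claim at a time may update the counters -- the log's
            # "waited 0.478s" line is another build blocking right here.
            if self.vcpus_used + vcpus > self.vcpus_total:
                raise RuntimeError("claim rejected: not enough vCPUs")
            self.vcpus_used += vcpus
            return self.vcpus_used

    tracker = ToyResourceTracker(vcpus_total=192)
    tracker.instance_claim(1)   # m1.nano asks for a single vCPU
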
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 880.796808] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 880.801125] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 390324e6fbd34bd4ae4086351bad6f59 [ 880.802890] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 1299ebec361c4d538799627c8a11fdf4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.809582] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c07a5bcb318440da8040bdca40fd9603 [ 880.809901] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 880.811718] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 0de95b6b3e3d41d5a83739a347d08e8e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.813496] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1299ebec361c4d538799627c8a11fdf4 [ 880.859992] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0de95b6b3e3d41d5a83739a347d08e8e [ 880.863831] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 30fb3db253154f9bafb0fc6bd271c51b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 880.895511] env[61594]: DEBUG nova.policy [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '474d41dd1c6f42a6bdfa525e522b2d38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c837b52a6dc84bda829c0012a3537199', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 880.922809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 30fb3db253154f9bafb0fc6bd271c51b [ 880.924011] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 880.955121] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f80f94-25a2-4ac8-8a08-ec48cb71ce07 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.959796] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.960103] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.960271] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.960454] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.960601] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.960776] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.961046] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.961303] env[61594]: DEBUG nova.virt.hardware 
[None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.961500] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.961679] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.961871] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.962675] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8245f231-bf51-4292-ba98-b4d63105ebea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.972128] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0683a45-a33b-41bd-a182-9fc47dbdefd6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.976418] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c3cd97-b16a-49dd-a273-3cd270d5939b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.015869] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4219d2ba-ad88-435a-a9cd-58b1bf497c55 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.024140] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d10cc35-b6ed-4005-a581-539eff793c49 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.036997] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.037763] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f673e753c2a64b53b6a2fd89f524aaa0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.039061] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 
tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Successfully created port: 5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.052515] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f673e753c2a64b53b6a2fd89f524aaa0 [ 881.052638] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 881.055073] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 7e27b6897aab4136a9e575516827a919 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.067354] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7e27b6897aab4136a9e575516827a919 [ 881.068117] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.321s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 881.068588] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Start building networks asynchronously for instance. 
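
The "Successfully created port" lines above are the visible result of Nova calling Neutron's port-create API for each instance. A minimal sketch of that REST call using the requests library follows; the endpoint URL and token are placeholders, and the body shows only a subset of the attributes Nova actually sends.

    import requests

    NEUTRON_URL = "http://neutron.example.test:9696"   # placeholder endpoint
    TOKEN = "placeholder-keystone-token"               # placeholder auth token

    def create_port(network_id, device_id, project_id):
        body = {"port": {
            "network_id": network_id,
            "device_id": device_id,            # the instance UUID
            "device_owner": "compute:nova",
            "project_id": project_id,
        }}
        resp = requests.post(NEUTRON_URL + "/v2.0/ports",
                             json=body,
                             headers={"X-Auth-Token": TOKEN})
        resp.raise_for_status()
        # Neutron returns the new port, e.g. 5439bf7f-9646-4187-a524-99bb1003f120 above
        return resp.json()["port"]["id"]
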
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 881.074024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 55e4336f21484e9bbf4c586098bbad69 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.110930] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55e4336f21484e9bbf4c586098bbad69 [ 881.112063] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.112654] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg dcf48067d0f440638bbd84f0a9956377 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.113497] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 881.113669] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 881.131666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dcf48067d0f440638bbd84f0a9956377 [ 881.132324] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Start building block device mappings for instance. 
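
The "Using /dev/sd instead of None" line above is Nova's device-naming helper falling back to the default /dev/sd prefix before picking the next free name for a block device mapping. A toy version of that selection (not the real get_next_device_name, which also accounts for existing mappings and virt-driver prefixes) looks like:

    import string

    def next_device_name(taken, prefix="/dev/sd"):
        """Return the first unused /dev/sd<letter> name."""
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in taken:
                return candidate
        raise ValueError("no free device names under %s" % prefix)

    print(next_device_name(set()))          # /dev/sda
    print(next_device_name({"/dev/sda"}))   # /dev/sdb
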
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 881.133970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 00f5d5dc46bb47419f3821e49dad851a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.179633] env[61594]: DEBUG nova.policy [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '474d41dd1c6f42a6bdfa525e522b2d38', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c837b52a6dc84bda829c0012a3537199', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 881.181513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 00f5d5dc46bb47419f3821e49dad851a [ 881.186016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f49844fef70048a6a7194d7809189265 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 881.225077] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f49844fef70048a6a7194d7809189265 [ 881.226257] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 881.254709] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 881.255140] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 881.255551] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.255791] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 881.255937] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.256237] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 881.256485] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 881.256654] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 881.256823] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 881.257178] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 881.257512] env[61594]: DEBUG nova.virt.hardware [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 881.258275] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ec6cac-f388-4a0c-b054-504879e724a4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.267245] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbef0ee-92f3-4b26-af51-10717d92e759 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.524417] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Successfully created port: 6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.007669] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Successfully created port: 79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 882.302472] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "3a9e32f2-4300-4b44-ae16-67792000eb08" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.302755] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.303189] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 
tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg cff89c61185d4050a6fb180c1882f030 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.315687] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cff89c61185d4050a6fb180c1882f030 [ 882.316220] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 882.318006] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg f6983293dafb44b39a35aa6862c4549b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.321121] env[61594]: DEBUG nova.compute.manager [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Received event network-vif-plugged-6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 882.321325] env[61594]: DEBUG oslo_concurrency.lockutils [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] Acquiring lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.321526] env[61594]: DEBUG oslo_concurrency.lockutils [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.321691] env[61594]: DEBUG oslo_concurrency.lockutils [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.321863] env[61594]: DEBUG nova.compute.manager [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] No waiting events found dispatching network-vif-plugged-6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.322045] env[61594]: WARNING nova.compute.manager [req-990d71c7-7786-49d6-a894-2993338e179c req-e8d974ca-5ddf-43e6-8ff7-711db999e6ec service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Received unexpected event network-vif-plugged-6f22b9ff-4120-41b8-98e5-32b052723166 for instance with vm_state building and task_state spawning. 
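The "Acquiring lock"/"acquired"/"released" records in this stretch are emitted by oslo.concurrency's lockutils, which Nova uses to serialize work on a named resource (an instance's event list, the resource tracker's "compute_resources", a network cache). A minimal sketch of the same pattern, assuming nothing beyond what the log shows; the lock names and the claim() function are illustrative placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    # Decorator form: every call to claim() runs under the named in-process lock,
    # which is what produces the "acquired by" / "released by" debug records above.
    @lockutils.synchronized("demo-compute-resources")
    def claim():
        return "claimed"

    # Context-manager form of the same thing, as used around cache refreshes.
    with lockutils.lock("demo-refresh-cache"):
        print("work done while demo-refresh-cache is held")

    print(claim())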
[ 882.351675] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6983293dafb44b39a35aa6862c4549b [ 882.367653] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.367904] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.369589] env[61594]: INFO nova.compute.claims [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.371785] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 85942d7d72de4eb7ba425078d8032acc in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.410817] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Successfully updated port: 6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.411332] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 5980d6bc9bdc49d5a237af03e76622f6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.416383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85942d7d72de4eb7ba425078d8032acc [ 882.418609] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 1364fd40f72647108eec60bde388e111 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.426340] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5980d6bc9bdc49d5a237af03e76622f6 [ 882.426917] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.427173] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock 
"refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.427232] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 882.427577] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 3c661b9b1159424c84719a86fb539c5a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.428455] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1364fd40f72647108eec60bde388e111 [ 882.434944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c661b9b1159424c84719a86fb539c5a [ 882.471470] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.571384] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca19e82d-e167-499b-bf91-20b9ae102f95 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.579566] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-344d2104-a0a7-4940-8b34-8d99ee4faac1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.614213] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd887e4-05f9-4708-87bf-3ef1179383ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.618123] env[61594]: DEBUG nova.compute.manager [req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Received event network-vif-plugged-5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 882.618346] env[61594]: DEBUG oslo_concurrency.lockutils [req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] Acquiring lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.618733] env[61594]: DEBUG oslo_concurrency.lockutils [req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.618992] env[61594]: DEBUG oslo_concurrency.lockutils 
[req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.619206] env[61594]: DEBUG nova.compute.manager [req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] No waiting events found dispatching network-vif-plugged-5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 882.619439] env[61594]: WARNING nova.compute.manager [req-7efc136c-ee2a-4ebb-872b-50837db06917 req-7c87259d-24cc-4aca-9334-17f84f2ecc7b service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Received unexpected event network-vif-plugged-5439bf7f-9646-4187-a524-99bb1003f120 for instance with vm_state building and task_state spawning. [ 882.626256] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-154f2d60-0461-4382-b5a2-939215bd0b10 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.641798] env[61594]: DEBUG nova.compute.provider_tree [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.642592] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg ced5f4d69eb24039845a57728aa00030 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.651616] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ced5f4d69eb24039845a57728aa00030 [ 882.652541] env[61594]: DEBUG nova.scheduler.client.report [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 882.655383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg e51389a067074a90a42649c6900ec796 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.670300] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e51389a067074a90a42649c6900ec796 [ 882.671479] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 
tempest-ServerActionsTestOtherA-627368268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.671964] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 882.673714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 886059f1d19247088756ddffa644ed36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.710617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 886059f1d19247088756ddffa644ed36 [ 882.712039] env[61594]: DEBUG nova.compute.utils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 882.712590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 55b4b982797e4e7b8a293499973a5411 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.717020] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Allocating IP information in the background. 
{{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 882.717020] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 882.730464] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Updating instance_info_cache with network_info: [{"id": "6f22b9ff-4120-41b8-98e5-32b052723166", "address": "fa:16:3e:a4:44:8a", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f22b9ff-41", "ovs_interfaceid": "6f22b9ff-4120-41b8-98e5-32b052723166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 882.730998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg b6f86b959e7048be85fd5853fcc2179a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.732448] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Successfully updated port: 5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.732809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg abd139d582d64f228fe64b46d3385f12 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.737787] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 55b4b982797e4e7b8a293499973a5411 [ 882.737787] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 882.737907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 4fdb94d9f79e462591ccb887d9a7fd56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.747886] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg abd139d582d64f228fe64b46d3385f12 [ 882.748506] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 882.748668] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.748828] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 882.749203] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 9fec279dc2cc4550a28cf2923d575cdb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.756838] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b6f86b959e7048be85fd5853fcc2179a [ 882.757283] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9fec279dc2cc4550a28cf2923d575cdb [ 882.757765] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.758369] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance network_info: |[{"id": "6f22b9ff-4120-41b8-98e5-32b052723166", "address": "fa:16:3e:a4:44:8a", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f22b9ff-41", "ovs_interfaceid": "6f22b9ff-4120-41b8-98e5-32b052723166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 882.760294] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a4:44:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f22b9ff-4120-41b8-98e5-32b052723166', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.767696] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating folder: Project (c837b52a6dc84bda829c0012a3537199). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 882.768245] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebbe2aad-b93f-4178-a8d1-fadd2f41aa57 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.773142] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4fdb94d9f79e462591ccb887d9a7fd56 [ 882.775809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 37fd202920f44d7ca53cbaa05892e717 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 882.779332] env[61594]: DEBUG nova.policy [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a820ad08ac294a1da60fe38fcaceccbb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '97b1a06e12d741dbb654564a7ec4bfc6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 882.784805] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created folder: Project (c837b52a6dc84bda829c0012a3537199) in parent group-v277030. 
[ 882.785320] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating folder: Instances. Parent ref: group-v277061. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 882.785320] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cee16b67-0309-4da8-a8df-0bfbfb9bcb09 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.796372] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created folder: Instances in parent group-v277061. [ 882.796955] env[61594]: DEBUG oslo.service.loopingcall [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 882.797205] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 882.797591] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-842fd5c5-65a8-4c04-95da-df78b8602d9e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.812895] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37fd202920f44d7ca53cbaa05892e717 [ 882.814409] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Start spawning the instance on the hypervisor. {{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 882.823828] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.823828] env[61594]: value = "task-1291433" [ 882.823828] env[61594]: _type = "Task" [ 882.823828] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.832065] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291433, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.835573] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 882.840085] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.840314] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.840471] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.840655] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.840804] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.840950] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.841170] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.841330] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.841494] env[61594]: DEBUG nova.virt.hardware 
[None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.841821] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.841821] env[61594]: DEBUG nova.virt.hardware [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.842600] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcfc1606-db82-4be2-a295-f6c99b5cc608 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.850689] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0210af1-3336-4217-aab4-38fa059efb9c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.132626] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Successfully created port: 192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 883.147146] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Updating instance_info_cache with network_info: [{"id": "5439bf7f-9646-4187-a524-99bb1003f120", "address": "fa:16:3e:01:e7:7c", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5439bf7f-96", "ovs_interfaceid": "5439bf7f-9646-4187-a524-99bb1003f120", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.149844] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 26cc246b6dfe479cb61b206ba9177ab3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.165826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26cc246b6dfe479cb61b206ba9177ab3 [ 883.166521] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.166824] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance network_info: |[{"id": "5439bf7f-9646-4187-a524-99bb1003f120", "address": "fa:16:3e:01:e7:7c", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5439bf7f-96", "ovs_interfaceid": "5439bf7f-9646-4187-a524-99bb1003f120", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 883.167240] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:e7:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5439bf7f-9646-4187-a524-99bb1003f120', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.175018] env[61594]: DEBUG oslo.service.loopingcall [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.175541] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 883.175770] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3001c16-c256-4262-95ea-8c5df950f4d5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.200484] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.200484] env[61594]: value = "task-1291434" [ 883.200484] env[61594]: _type = "Task" [ 883.200484] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.208505] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291434, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.253231] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Successfully updated port: 79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.253891] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 70c0719e002d418a8fc598d5f290d1bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.271376] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70c0719e002d418a8fc598d5f290d1bd [ 883.273020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.273020] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.273020] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.273020] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 62665a298175442fa3b8b9709bd7acbd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.284648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
62665a298175442fa3b8b9709bd7acbd [ 883.316120] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.333156] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291433, 'name': CreateVM_Task, 'duration_secs': 0.319985} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.333272] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 883.333887] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.334058] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.334360] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.334615] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a05048c3-bbcd-4e8e-bfb8-fd886332a38b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.341347] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 883.341347] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52faf64d-2f59-6097-b11d-94092e32a85d" [ 883.341347] env[61594]: _type = "Task" [ 883.341347] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.349068] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52faf64d-2f59-6097-b11d-94092e32a85d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.493757] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Updating instance_info_cache with network_info: [{"id": "79f8a0f5-133b-408a-9343-481e78244799", "address": "fa:16:3e:2b:ee:c8", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f8a0f5-13", "ovs_interfaceid": "79f8a0f5-133b-408a-9343-481e78244799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.494345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg a65d8f10116a4952808f16fe24de960e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.505282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a65d8f10116a4952808f16fe24de960e [ 883.505940] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.506312] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance network_info: |[{"id": "79f8a0f5-133b-408a-9343-481e78244799", "address": "fa:16:3e:2b:ee:c8", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f8a0f5-13", "ovs_interfaceid": "79f8a0f5-133b-408a-9343-481e78244799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 883.506666] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:ee:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79f8a0f5-133b-408a-9343-481e78244799', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.515467] env[61594]: DEBUG oslo.service.loopingcall [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 883.515982] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 883.516236] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-335baf44-377d-497b-8fd7-23b3b7b959de {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.537183] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.537183] env[61594]: value = "task-1291435" [ 883.537183] env[61594]: _type = "Task" [ 883.537183] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.544520] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291435, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.710604] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291434, 'name': CreateVM_Task, 'duration_secs': 0.315445} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.710831] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 883.712613] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.760014] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Successfully updated port: 192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.760811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 0da42816ba93474f9a961b61fbc2e867 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.771110] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0da42816ba93474f9a961b61fbc2e867 [ 883.771830] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.771976] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquired lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.772141] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.772519] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg e27cc35ea9164a53a61b8e038e3bc0f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.780561] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e27cc35ea9164a53a61b8e038e3bc0f1 [ 883.818555] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 883.851383] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.851626] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 883.851880] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.852104] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.852395] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 883.852631] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5543e305-a5d8-42b1-a3b6-0609669bb9a1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.857059] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 883.857059] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52ba8c5f-851a-39ff-22cd-8ceb2d5ca31e" [ 883.857059] env[61594]: _type = "Task" [ 883.857059] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.864293] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52ba8c5f-851a-39ff-22cd-8ceb2d5ca31e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.984655] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Updating instance_info_cache with network_info: [{"id": "192c9881-63e1-478b-8703-7bb52b3d903d", "address": "fa:16:3e:9d:2c:c4", "network": {"id": "9ea0024c-737d-405a-811b-6157d8c9387b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-656571684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b1a06e12d741dbb654564a7ec4bfc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192c9881-63", "ovs_interfaceid": "192c9881-63e1-478b-8703-7bb52b3d903d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.985321] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 837d0da5579a4714ab280cca0718eb5c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 883.995535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 837d0da5579a4714ab280cca0718eb5c [ 883.996198] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Releasing lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.996583] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance network_info: |[{"id": "192c9881-63e1-478b-8703-7bb52b3d903d", "address": "fa:16:3e:9d:2c:c4", "network": {"id": "9ea0024c-737d-405a-811b-6157d8c9387b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-656571684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b1a06e12d741dbb654564a7ec4bfc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192c9881-63", "ovs_interfaceid": "192c9881-63e1-478b-8703-7bb52b3d903d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 883.997056] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9d:2c:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd829efb7-e98e-4b67-bd03-b0888287dbfd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '192c9881-63e1-478b-8703-7bb52b3d903d', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.004460] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Creating folder: Project (97b1a06e12d741dbb654564a7ec4bfc6). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 884.004949] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb7908ff-4792-4efa-a6fe-b268437283da {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.016082] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Created folder: Project (97b1a06e12d741dbb654564a7ec4bfc6) in parent group-v277030. [ 884.016260] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Creating folder: Instances. Parent ref: group-v277066. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 884.016473] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42e1e8b4-e9f1-4946-9165-ba406d113a76 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.025665] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Created folder: Instances in parent group-v277066. [ 884.025882] env[61594]: DEBUG oslo.service.loopingcall [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.026078] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 884.026272] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-934353ad-5e18-4ed1-ba26-97c0965ee43c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.047566] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291435, 'name': CreateVM_Task, 'duration_secs': 0.314204} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.048482] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 884.048673] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 884.048673] env[61594]: value = "task-1291438" [ 884.048673] env[61594]: _type = "Task" [ 884.048673] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.049276] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.056261] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291438, 'name': CreateVM_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.367478] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.367478] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.367836] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.367836] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.368314] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.368602] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba5683c-ec40-416c-b743-7fcfd6a3671d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.372937] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 884.372937] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]521c0a6b-e19a-5ff5-0893-30ccc83ead2d" [ 884.372937] env[61594]: _type = "Task" [ 884.372937] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.382290] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]521c0a6b-e19a-5ff5-0893-30ccc83ead2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.457389] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Received event network-changed-6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.457591] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Refreshing instance network info cache due to event network-changed-6f22b9ff-4120-41b8-98e5-32b052723166. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 884.457787] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Acquiring lock "refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.457930] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Acquired lock "refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.458203] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Refreshing network info cache for port 6f22b9ff-4120-41b8-98e5-32b052723166 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.458753] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Expecting reply to msg 4dc9f9f4a6bd40ccb61aedbdac815b8c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.465641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4dc9f9f4a6bd40ccb61aedbdac815b8c [ 884.562469] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291438, 'name': CreateVM_Task, 'duration_secs': 0.322338} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.562646] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 884.563286] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.634542] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Received event network-changed-5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.634732] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Refreshing instance network info cache due to event network-changed-5439bf7f-9646-4187-a524-99bb1003f120. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 884.634945] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Acquiring lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.635163] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Acquired lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.635344] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Refreshing network info cache for port 5439bf7f-9646-4187-a524-99bb1003f120 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.635817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Expecting reply to msg 991881f3a3d042f88b884c9ef901928d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.643152] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 991881f3a3d042f88b884c9ef901928d [ 884.728407] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Updated VIF entry in instance network info cache for port 6f22b9ff-4120-41b8-98e5-32b052723166. 
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 884.728763] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Updating instance_info_cache with network_info: [{"id": "6f22b9ff-4120-41b8-98e5-32b052723166", "address": "fa:16:3e:a4:44:8a", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f22b9ff-41", "ovs_interfaceid": "6f22b9ff-4120-41b8-98e5-32b052723166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.729345] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Expecting reply to msg c7337393007b4db0873a94adc49d9b50 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.737578] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7337393007b4db0873a94adc49d9b50 [ 884.738224] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Releasing lock "refresh_cache-1ac59594-4fbc-4a99-9e73-657185d4f218" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.738448] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Received event network-vif-plugged-192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.738642] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Acquiring lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.738835] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.738999] env[61594]: DEBUG 
oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.739190] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] No waiting events found dispatching network-vif-plugged-192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.739358] env[61594]: WARNING nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Received unexpected event network-vif-plugged-192c9881-63e1-478b-8703-7bb52b3d903d for instance with vm_state building and task_state spawning. [ 884.739524] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Received event network-changed-192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.739678] env[61594]: DEBUG nova.compute.manager [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Refreshing instance network info cache due to event network-changed-192c9881-63e1-478b-8703-7bb52b3d903d. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 884.739860] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Acquiring lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.739999] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Acquired lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.740179] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Refreshing network info cache for port 192c9881-63e1-478b-8703-7bb52b3d903d {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.740604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Expecting reply to msg e4d93e763a7e4c7e87687bba2703634b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.747534] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e4d93e763a7e4c7e87687bba2703634b [ 884.887018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] 
devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.887018] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.887018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.887018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.887230] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 884.887230] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ceba6d38-137c-49da-804f-7bb9ab014e7a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.891587] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for the task: (returnval){ [ 884.891587] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]5202c9f3-4954-0ff7-8ea9-dab6a1c9df14" [ 884.891587] env[61594]: _type = "Task" [ 884.891587] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.902261] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]5202c9f3-4954-0ff7-8ea9-dab6a1c9df14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.945655] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Updated VIF entry in instance network info cache for port 5439bf7f-9646-4187-a524-99bb1003f120. 
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 884.946040] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Updating instance_info_cache with network_info: [{"id": "5439bf7f-9646-4187-a524-99bb1003f120", "address": "fa:16:3e:01:e7:7c", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5439bf7f-96", "ovs_interfaceid": "5439bf7f-9646-4187-a524-99bb1003f120", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.946576] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Expecting reply to msg da89eb3c53ee4af48e424a752696c9f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.955650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da89eb3c53ee4af48e424a752696c9f4 [ 884.956274] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Releasing lock "refresh_cache-cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.956507] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Received event network-vif-plugged-79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.956700] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Acquiring lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.956903] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.957079] env[61594]: DEBUG 
oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.957253] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] No waiting events found dispatching network-vif-plugged-79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 884.957422] env[61594]: WARNING nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Received unexpected event network-vif-plugged-79f8a0f5-133b-408a-9343-481e78244799 for instance with vm_state building and task_state spawning. [ 884.957587] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Received event network-changed-79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 884.957743] env[61594]: DEBUG nova.compute.manager [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Refreshing instance network info cache due to event network-changed-79f8a0f5-133b-408a-9343-481e78244799. {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 884.957926] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Acquiring lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 884.958073] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Acquired lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.958233] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Refreshing network info cache for port 79f8a0f5-133b-408a-9343-481e78244799 {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 884.958760] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Expecting reply to msg c4495d270cf24da1ba56cada15e69520 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 884.966058] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c4495d270cf24da1ba56cada15e69520 [ 885.151944] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Updated VIF entry in instance network info cache for port 192c9881-63e1-478b-8703-7bb52b3d903d. 
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 885.152343] env[61594]: DEBUG nova.network.neutron [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Updating instance_info_cache with network_info: [{"id": "192c9881-63e1-478b-8703-7bb52b3d903d", "address": "fa:16:3e:9d:2c:c4", "network": {"id": "9ea0024c-737d-405a-811b-6157d8c9387b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-656571684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "97b1a06e12d741dbb654564a7ec4bfc6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d829efb7-e98e-4b67-bd03-b0888287dbfd", "external-id": "nsx-vlan-transportzone-128", "segmentation_id": 128, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap192c9881-63", "ovs_interfaceid": "192c9881-63e1-478b-8703-7bb52b3d903d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.152861] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Expecting reply to msg 6fe71bfa30f248f0ae1a51a30be6544c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 885.161558] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6fe71bfa30f248f0ae1a51a30be6544c [ 885.162152] env[61594]: DEBUG oslo_concurrency.lockutils [req-49d9e2ad-85a1-45e2-a66b-64e0db282837 req-1bed3610-cdfe-48fd-a6cf-d8c896003e77 service nova] Releasing lock "refresh_cache-3a9e32f2-4300-4b44-ae16-67792000eb08" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.381984] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Updated VIF entry in instance network info cache for port 79f8a0f5-133b-408a-9343-481e78244799. 
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 885.382351] env[61594]: DEBUG nova.network.neutron [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Updating instance_info_cache with network_info: [{"id": "79f8a0f5-133b-408a-9343-481e78244799", "address": "fa:16:3e:2b:ee:c8", "network": {"id": "65d7e9a3-8658-43ce-9f39-2ca2c47d44e4", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-550653923-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c837b52a6dc84bda829c0012a3537199", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79f8a0f5-13", "ovs_interfaceid": "79f8a0f5-133b-408a-9343-481e78244799", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.382848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Expecting reply to msg f19de58eae9b4f93a09c1d8517dd197d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 885.391229] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f19de58eae9b4f93a09c1d8517dd197d [ 885.391792] env[61594]: DEBUG oslo_concurrency.lockutils [req-f71fab30-3f20-4263-80c1-e5a357fe756b req-ca80b99b-cc96-4b16-ba6a-ae5a3c800b74 service nova] Releasing lock "refresh_cache-299afd65-10d4-4602-9a7e-b5d12e88a823" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.401967] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 885.402208] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.402418] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.675530] env[61594]: WARNING oslo_vmware.rw_handles [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 906.675530] env[61594]: ERROR oslo_vmware.rw_handles [ 906.676235] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 906.678156] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 906.678420] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Copying Virtual Disk [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/d37aaf21-5c99-4338-afd8-7af3988be8e9/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 906.678716] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1dbddf3a-e1ad-40be-9475-601f8442f977 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.687846] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 906.687846] env[61594]: value = "task-1291439" [ 
906.687846] env[61594]: _type = "Task" [ 906.687846] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.696090] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291439, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.198957] env[61594]: DEBUG oslo_vmware.exceptions [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 907.199256] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.199827] env[61594]: ERROR nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 907.199827] env[61594]: Faults: ['InvalidArgument'] [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Traceback (most recent call last): [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] yield resources [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self.driver.spawn(context, instance, image_meta, [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self._fetch_image_if_missing(context, vi) [ 907.199827] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] image_cache(vi, tmp_image_ds_loc) [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: 
d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] vm_util.copy_virtual_disk( [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] session._wait_for_task(vmdk_copy_task) [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return self.wait_for_task(task_ref) [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return evt.wait() [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] result = hub.switch() [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 907.200167] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return self.greenlet.switch() [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self.f(*self.args, **self.kw) [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] raise exceptions.translate_fault(task_info.error) [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Faults: ['InvalidArgument'] [ 907.200482] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] [ 907.200482] env[61594]: INFO nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Terminating instance [ 907.201766] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.202026] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.202366] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c137389-cd63-459f-9b95-357f8ebc3639 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.204839] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.205007] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.205183] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 907.205607] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg fdaa4004d0f54c3b9dadb8d0b9602971 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 907.212135] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.212313] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Folder [datastore1] devstack-image-cache_base created. 
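The directory operations above follow the image-cache layout visible in this log: the image is first fetched to a per-request vmware_temp/<uuid>/<image-id>/tmp-sparse.vmdk file and then copied into devstack-image-cache_base/<image-id>/<image-id>.vmdk on the same datastore. A minimal sketch of those path strings, using only values taken from the log and hypothetical helper names:

```python
# Illustrative sketch only: rebuilds the datastore paths seen in this log.
# The helper and constant names are stand-ins, not Nova's actual code.
import uuid

DATASTORE = "datastore1"
IMAGE_ID = "9b91196f-102b-4380-9e69-c9f71c27118a"   # image id from the log
CACHE_FOLDER = "devstack-image-cache_base"

def temp_fetch_path(image_id: str, request_uuid: str) -> str:
    """Per-request temporary location the image is first downloaded to."""
    return f"[{DATASTORE}] vmware_temp/{request_uuid}/{image_id}/tmp-sparse.vmdk"

def cached_image_path(image_id: str) -> str:
    """Shared cache location the sparse image is copied to afterwards."""
    return f"[{DATASTORE}] {CACHE_FOLDER}/{image_id}/{image_id}.vmdk"

if __name__ == "__main__":
    req = str(uuid.uuid4())
    print(temp_fetch_path(IMAGE_ID, req))
    print(cached_image_path(IMAGE_ID))
```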
{{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 907.213278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fdaa4004d0f54c3b9dadb8d0b9602971 [ 907.213631] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3ebf32b-5ecf-43e4-aacb-ae68f8ff0bda {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.220759] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 907.220759] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]522cf878-bf32-ca99-7a6e-a91a59fcfaf1" [ 907.220759] env[61594]: _type = "Task" [ 907.220759] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.230053] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]522cf878-bf32-ca99-7a6e-a91a59fcfaf1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.243683] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 907.372855] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.373444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 5b74753eb25341eb882c6ef9233abbcd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 907.382265] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b74753eb25341eb882c6ef9233abbcd [ 907.382923] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.384143] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Start destroying the instance on the hypervisor. 
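The lockutils lines in this section record how long a caller waited to acquire a named lock and how long it was held ("waited 0.000s", "held 0.223s"). A plain-threading illustration of that accounting, which only mimics the semantics and is not the oslo.concurrency implementation:

```python
# Minimal illustration of the "waited ... / held ..." accounting visible in
# the lockutils log lines. Plain threading, not oslo.concurrency.
import contextlib
import threading
import time

_locks: dict[str, threading.Lock] = {}

@contextlib.contextmanager
def timed_lock(name: str):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" released :: held {held:.3f}s')

# usage (lock name taken from the log):
# with timed_lock("refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b"):
#     ...  # rebuild the instance network info cache
```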
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 907.384143] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 907.384624] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b604836-92db-43f7-ab65-abf57323d666 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.392576] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 907.392804] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-05bfb900-61b1-4da8-a0ef-cf7f75b2fa65 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.424868] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 907.425089] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 907.425277] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleting the datastore file [datastore1] d364fc2e-89d7-4b2e-a510-19148a8f1a2b {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.425518] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ba5814a-d69f-4c3e-b8ac-bfef8984e532 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.432074] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 907.432074] env[61594]: value = "task-1291441" [ 907.432074] env[61594]: _type = "Task" [ 907.432074] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.439122] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291441, 'name': DeleteDatastoreFile_Task} progress is 0%. 
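The wait_for_task / "progress is 0%" lines reflect a poll-until-done loop: the task state is re-read on an interval, progress is logged, and an error state is translated into an exception. A generic sketch of that pattern, not the oslo.vmware implementation:

```python
# Hedged sketch of the poll-until-done pattern behind the wait_for_task /
# _poll_task log lines. The task-info shape and names are illustrative.
import time

class TaskFailed(Exception):
    pass

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a task-info callable until it reports success or error."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()          # e.g. {'state': 'running', 'progress': 0}
        state = info.get("state")
        if state == "success":
            return info.get("result")
        if state == "error":
            raise TaskFailed(info.get("error", "task failed"))
        print(f"Task progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")
```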
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.731500] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 907.731878] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating directory with path [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.731979] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4969baba-2f82-465e-8892-4f6991d2871d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.742947] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Created directory with path [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.743148] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Fetch image to [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 907.743322] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 907.744012] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc93400-bdbc-445a-b877-17fb234e9c48 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.750428] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44618c58-29ad-4bd5-a29c-ecd309cfbeed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.759138] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2a5227-07cc-4a54-842e-ab5b372f9d7f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.788397] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb52f04c-4961-44d8-90be-5368a2eba7d4 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.793659] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-df6970b9-ca50-441d-a488-5c9c379e50b2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.824728] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 907.874839] env[61594]: DEBUG oslo_vmware.rw_handles [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 907.937895] env[61594]: DEBUG oslo_vmware.rw_handles [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 907.938094] env[61594]: DEBUG oslo_vmware.rw_handles [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 907.943939] env[61594]: DEBUG oslo_vmware.api [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.040887} completed successfully. 
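The RemoteDisconnected warnings in this section come from the same place each time: after the VMDK body has been streamed to the datastore URL, the write handle calls getresponse() while closing, and if the server has already dropped the connection without sending a status line, http.client raises RemoteDisconnected. The handle appears to log the warning and carry on, since the next lines still report the image file as downloaded. A stdlib-only sketch of that failure point, with placeholder host, path and body:

```python
# Sketch of where the RemoteDisconnected warnings originate: getresponse()
# fails if the server closed the connection without a status line. Host,
# path and body are placeholders; only the stdlib calls are real.
import http.client
import ssl

def upload(host: str, path: str, data: bytes) -> None:
    conn = http.client.HTTPSConnection(host, 443, context=ssl.create_default_context())
    conn.putrequest("PUT", path)
    conn.putheader("Content-Length", str(len(data)))
    conn.endheaders()
    conn.send(data)
    try:
        resp = conn.getresponse()   # the call that raised in the traceback above
        resp.read()
    except http.client.RemoteDisconnected:
        # The server dropped the connection without responding. The bytes may
        # still have been written, so a caller can log a warning and move on.
        pass
    finally:
        conn.close()
```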
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.944194] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.944378] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 907.944546] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 907.944716] env[61594]: INFO nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Took 0.56 seconds to destroy the instance on the hypervisor. [ 907.944947] env[61594]: DEBUG oslo.service.loopingcall [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.945159] env[61594]: DEBUG nova.compute.manager [-] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Skipping network deallocation for instance since networking was not requested. 
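The _deallocate_network_with_retries looping call above wraps network deallocation in retries, and in this case short-circuits because the server was booted without networking. A sketch of such a wrapper, with the retry policy and names being assumptions rather than Nova's actual code:

```python
# Illustrative retry wrapper in the spirit of the looping call above.
# The attempt count and delay are assumed; the short-circuit mirrors the
# "networking was not requested" branch in the log.
import time

def deallocate_network_with_retries(deallocate, requested_networks,
                                    attempts=3, delay=1.0):
    if not requested_networks:
        print("Skipping network deallocation for instance "
              "since networking was not requested.")
        return
    last_exc = None
    for attempt in range(1, attempts + 1):
        try:
            deallocate()
            return
        except Exception as exc:   # broad on purpose: illustration only
            last_exc = exc
            print(f"Deallocation attempt {attempt} failed: {exc!r}; retrying")
            time.sleep(delay)
    raise last_exc
```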
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 907.947238] env[61594]: DEBUG nova.compute.claims [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 907.947406] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 907.947618] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 907.949695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 0947a46629ca419483dfc085fe7ce061 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 907.987696] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0947a46629ca419483dfc085fe7ce061 [ 908.090094] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41692456-8093-422a-9669-f4051119bb23 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.097467] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e92b898-5fd4-492c-8e9e-aa238b41d1a4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.127178] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19495392-3e9d-44df-8b1b-8f55598c821c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.134048] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff72c36-03b0-424a-82d1-644697cec102 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.146638] env[61594]: DEBUG nova.compute.provider_tree [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.147126] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 620d337d15df4f719c7ce22605e3342a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.155301] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
620d337d15df4f719c7ce22605e3342a [ 908.156142] env[61594]: DEBUG nova.scheduler.client.report [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 908.158295] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 5ac6c0753a74410b9609ed7d23a2c20c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.169324] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5ac6c0753a74410b9609ed7d23a2c20c [ 908.169970] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.222s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 908.170494] env[61594]: ERROR nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 908.170494] env[61594]: Faults: ['InvalidArgument'] [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Traceback (most recent call last): [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self.driver.spawn(context, instance, image_meta, [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self._fetch_image_if_missing(context, vi) [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 908.170494] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] image_cache(vi, tmp_image_ds_loc) [ 908.170494] env[61594]: ERROR 
nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] vm_util.copy_virtual_disk( [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] session._wait_for_task(vmdk_copy_task) [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return self.wait_for_task(task_ref) [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return evt.wait() [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] result = hub.switch() [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] return self.greenlet.switch() [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 908.170903] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] self.f(*self.args, **self.kw) [ 908.171275] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 908.171275] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] raise exceptions.translate_fault(task_info.error) [ 908.171275] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 908.171275] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Faults: ['InvalidArgument'] [ 908.171275] env[61594]: ERROR nova.compute.manager [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] [ 908.171275] env[61594]: DEBUG nova.compute.utils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 908.172691] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 
tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Build of instance d364fc2e-89d7-4b2e-a510-19148a8f1a2b was re-scheduled: A specified parameter was not correct: fileType [ 908.172691] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 908.173085] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 908.173309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 908.173456] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.173616] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.173976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c3abc21c693c47cf960f85582210a56e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.179303] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3abc21c693c47cf960f85582210a56e [ 908.197679] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.257295] env[61594]: DEBUG nova.network.neutron [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.257797] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 12e7f8a5a11d4dd0b8aed08a6f4bb233 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.265807] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 12e7f8a5a11d4dd0b8aed08a6f4bb233 [ 908.266345] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "refresh_cache-d364fc2e-89d7-4b2e-a510-19148a8f1a2b" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.266550] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 908.266732] env[61594]: DEBUG nova.compute.manager [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: d364fc2e-89d7-4b2e-a510-19148a8f1a2b] Skipping network deallocation for instance since networking was not requested. 
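The ERROR block and the "was re-scheduled" line describe a single control flow: the spawn failure is caught in _build_and_run_instance, the resource claim and network information are cleaned up, and the build is handed back to the scheduler. A compressed, illustrative version of that flow; the names are stand-ins, not Nova's real interfaces:

```python
# Compressed sketch of the failure path narrated by the log: spawn raises,
# the claim is aborted, VIF unplugging is attempted only if the driver
# supports it, and the build is signalled for rescheduling.
class RescheduledException(Exception):
    pass

def build_and_run_instance(driver, instance, cleanup_allocations, cleanup_networks):
    try:
        driver.spawn(instance)
    except Exception as exc:
        cleanup_allocations(instance)
        if hasattr(driver, "unplug_vifs"):
            driver.unplug_vifs(instance)
        else:
            print("Virt driver does not provide unplug_vifs; "
                  "cannot tell whether VIFs should be unplugged.")
        cleanup_networks(instance)
        raise RescheduledException(str(exc)) from exc
```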
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 908.268385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 19f776cd658742d49d6df61e9bc0785b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.297014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 19f776cd658742d49d6df61e9bc0785b [ 908.300049] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 33ff31dfe83e450f9f2ad3af8da8cfcd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.327516] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 33ff31dfe83e450f9f2ad3af8da8cfcd [ 908.348362] env[61594]: INFO nova.scheduler.client.report [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleted allocations for instance d364fc2e-89d7-4b2e-a510-19148a8f1a2b [ 908.355050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c6bdf0b6af15494c86641427d10e10d7 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 908.368238] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c6bdf0b6af15494c86641427d10e10d7 [ 908.368740] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2179efab-b9e1-4d16-862f-e0ad9e398a90 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "d364fc2e-89d7-4b2e-a510-19148a8f1a2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.804s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.544745] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.545152] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.545152] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... 
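Periodic tasks that depend on an interval option bail out when the option is disabled, which is what the reclaim_instance_interval line shows. A minimal sketch of that guard, using a stand-in config object rather than oslo.config:

```python
# Minimal sketch of the guard behind "CONF.reclaim_instance_interval <= 0,
# skipping...": a periodic task returns early when its interval is disabled.
from types import SimpleNamespace

CONF = SimpleNamespace(reclaim_instance_interval=0)   # 0 or negative disables it

def reclaim_queued_deletes():
    interval = CONF.reclaim_instance_interval
    if interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # Otherwise: find instances soft-deleted longer than `interval` seconds
    # ago and reclaim them (not shown here).
```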
{{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 929.540067] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.543697] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 930.543984] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.540152] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 931.540877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 6c2c087476854f63be395ef44577683f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 931.559528] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6c2c087476854f63be395ef44577683f [ 931.562384] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.544442] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.544622] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 932.544741] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 932.545331] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg d8f00a5a03ad43db931877bf763414dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.560221] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d8f00a5a03ad43db931877bf763414dd [ 932.562321] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562321] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562403] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562532] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562655] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562781] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.562904] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 932.563033] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
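The heal task first rebuilds the list of instances whose network info cache should be refreshed, skipping any that are still building; here every instance is skipped, so there is nothing to heal. A simplified sketch of that selection step, with the instance objects and state names reduced to stand-ins:

```python
# Sketch of the selection step behind "Rebuilding the list of instances to
# heal" and the per-instance "Skipping network cache update" messages.
from dataclasses import dataclass

@dataclass
class Instance:
    uuid: str
    vm_state: str          # e.g. "building", "active"

def instances_to_heal(instances):
    to_heal = []
    for inst in instances:
        if inst.vm_state == "building":
            print(f"[instance: {inst.uuid}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        to_heal.append(inst)
    if not to_heal:
        print("Didn't find any instances for network info cache update.")
    return to_heal
```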
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 932.563538] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.563879] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg bdf0a9de7a5744e0b022c3e5647ad577 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.573475] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bdf0a9de7a5744e0b022c3e5647ad577 [ 932.574365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.574577] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.574745] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.574899] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 932.575984] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6675e14-7a8d-46eb-8203-f4b836367745 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.584652] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebc390c2-fa4f-429a-8294-2b672c69d0cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.598965] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601bc22f-d48c-4c11-983b-bff07ea779e9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.605111] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a147528-0d09-47e9-a24d-fd0264d7e0e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.636016] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181502MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 932.636016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.636016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.636016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 5a47e74ae4ff43cea290a6f54ed94bce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.661638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5a47e74ae4ff43cea290a6f54ed94bce [ 932.664996] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 448c878bfc1a43489e0b0d528ce6f117 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.674399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 448c878bfc1a43489e0b0d528ce6f117 [ 932.695155] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance dc31ebf5-889b-438b-9f54-6df807714a38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695315] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0f6368a9-cadc-46b4-be16-017724580876 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695445] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 23641d1e-bbca-4887-95c6-5a6cac0ce6a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695568] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695687] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
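The final resource view reported just below can be reproduced from the per-instance allocations listed here: seven instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}, plus the 512 MB of reserved host memory from the inventory, match used_ram=1408MB, used_disk=7GB and used_vcpus=7, assuming reserved memory is counted as used (which these numbers suggest). A short check of that arithmetic:

```python
# Worked check of the "Final resource view" figures below, built only from
# values that appear in this log.
allocations = [{"DISK_GB": 1, "MEMORY_MB": 128, "VCPU": 1}] * 7
reserved_ram_mb = 512

used_ram_mb = reserved_ram_mb + sum(a["MEMORY_MB"] for a in allocations)
used_disk_gb = sum(a["DISK_GB"] for a in allocations)
used_vcpus = sum(a["VCPU"] for a in allocations)

assert (used_ram_mb, used_disk_gb, used_vcpus) == (1408, 7, 7)
print(f"used_ram={used_ram_mb}MB used_disk={used_disk_gb}GB used_vcpus={used_vcpus}")
```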
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695804] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 1ac59594-4fbc-4a99-9e73-657185d4f218 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.695919] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 932.696122] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 932.696262] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 932.779779] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ecd606-64b6-49e9-ae4d-670fd374a372 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.787188] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a2d216-6954-4a25-90ad-36e4b5787b0e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.817337] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14990c8b-4f73-4dea-86e9-ef764f271e4f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.823818] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9e6366-b209-4c69-873f-9966e5424689 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.836334] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.836762] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 9092979981d64246ac5e83979532e4f4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.844066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9092979981d64246ac5e83979532e4f4 [ 932.844884] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 932.846998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 270c417a2837495a9eed6eb1d7342fda in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 932.857278] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 270c417a2837495a9eed6eb1d7342fda [ 932.857897] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 932.858086] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.223s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.838464] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.885068] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg a14e17b04b6e4c4ea9eabdabcf383d25 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 934.894257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a14e17b04b6e4c4ea9eabdabcf383d25 [ 953.609267] env[61594]: WARNING oslo_vmware.rw_handles [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 953.609267] env[61594]: ERROR oslo_vmware.rw_handles [ 
953.610015] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 953.612011] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 953.612352] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Copying Virtual Disk [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/9c94641c-8249-415c-9b89-2aac7fe33b84/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 953.612642] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-536ba769-39e7-4cf5-85c0-bb276f7fc427 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.620464] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 953.620464] env[61594]: value = "task-1291442" [ 953.620464] env[61594]: _type = "Task" [ 953.620464] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.628486] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291442, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.130818] env[61594]: DEBUG oslo_vmware.exceptions [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 954.132093] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.132093] env[61594]: ERROR nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.132093] env[61594]: Faults: ['InvalidArgument'] [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Traceback (most recent call last): [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] yield resources [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self.driver.spawn(context, instance, image_meta, [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.132093] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self._fetch_image_if_missing(context, vi) [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] image_cache(vi, tmp_image_ds_loc) [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] vm_util.copy_virtual_disk( [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] session._wait_for_task(vmdk_copy_task) [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 
954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return self.wait_for_task(task_ref) [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return evt.wait() [ 954.132483] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] result = hub.switch() [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return self.greenlet.switch() [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self.f(*self.args, **self.kw) [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] raise exceptions.translate_fault(task_info.error) [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Faults: ['InvalidArgument'] [ 954.132898] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] [ 954.132898] env[61594]: INFO nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Terminating instance [ 954.133865] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.134074] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.134318] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbbc0631-4b0d-4516-8c44-111318960e46 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
954.136358] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.136514] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.136679] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 954.137097] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 5bc93912f5a645d9961ed97447acb511 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 954.144558] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.144740] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 954.145704] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5bc93912f5a645d9961ed97447acb511 [ 954.146075] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4d76bb9-c1f4-4fe1-800d-41aec6170ce6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.153521] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for the task: (returnval){ [ 954.153521] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]520534c1-ce66-11d8-6f36-2344c89bdef5" [ 954.153521] env[61594]: _type = "Task" [ 954.153521] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.162914] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]520534c1-ce66-11d8-6f36-2344c89bdef5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.257086] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 954.324194] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.324743] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 76fe857393534b7397ee6c14f21d691a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 954.333940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76fe857393534b7397ee6c14f21d691a [ 954.334519] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.334899] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 954.335117] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 954.336189] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d75de7-179f-48d0-94bd-d53e118dddd4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.344581] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 954.344795] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8f2e9d23-a5d3-442d-bd44-d718280faa39 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.381123] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 954.381348] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 954.381529] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleting the datastore file [datastore1] dc31ebf5-889b-438b-9f54-6df807714a38 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.381773] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-683cef23-82ae-4eea-8145-5ff3bfb1f4b3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.387594] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for the task: (returnval){ [ 954.387594] env[61594]: value = "task-1291444" [ 954.387594] env[61594]: _type = "Task" [ 954.387594] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.395338] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291444, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.664173] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 954.664533] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Creating directory with path [datastore1] vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.664590] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3462ec1-042b-407d-be0d-86833d3e9629 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.675985] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Created directory with path [datastore1] vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.676192] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Fetch image to [datastore1] vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 954.676394] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 954.677111] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abac0474-00e3-4c97-bd1e-e82b20629c82 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.684114] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a0cbff-9bb3-47ef-83ba-9b88fef80931 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.693113] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db20aa56-c4a8-4c66-a7c6-c515172b53b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.723368] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc6872e-cc41-4df5-a2b3-9ac80d4bb088 {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.728694] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-89e929a6-5a00-4cbb-87ab-a87e95305303 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.748639] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 954.800160] env[61594]: DEBUG oslo_vmware.rw_handles [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 954.861525] env[61594]: DEBUG oslo_vmware.rw_handles [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 954.861710] env[61594]: DEBUG oslo_vmware.rw_handles [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 954.897576] env[61594]: DEBUG oslo_vmware.api [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Task: {'id': task-1291444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033393} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.897817] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.898010] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.898200] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.898419] env[61594]: INFO nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Took 0.56 seconds to destroy the instance on the hypervisor. [ 954.898623] env[61594]: DEBUG oslo.service.loopingcall [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 954.898822] env[61594]: DEBUG nova.compute.manager [-] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 954.900941] env[61594]: DEBUG nova.compute.claims [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 954.901154] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 954.901367] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.903185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 67b40ce84d294253acd877020d2a0b6d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 954.935688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 67b40ce84d294253acd877020d2a0b6d [ 955.025687] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2db6d4-5023-4641-a33c-cec881210f94 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.033292] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f452d7d-bcf9-47cf-ae18-27e1ce74e000 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.063155] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fe1bbf-65f4-457f-8c10-55d79737de11 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.070519] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda70c0d-1010-4478-b402-a9e9aef7405c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.084902] env[61594]: DEBUG nova.compute.provider_tree [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.085318] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 72bc799afa454e79834f680bd2c33f94 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.092465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
72bc799afa454e79834f680bd2c33f94 [ 955.093391] env[61594]: DEBUG nova.scheduler.client.report [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 955.095668] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg c5fac5cd0c1e4def85b4e15b915292d3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.106956] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5fac5cd0c1e4def85b4e15b915292d3 [ 955.107756] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.206s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.108287] env[61594]: ERROR nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 955.108287] env[61594]: Faults: ['InvalidArgument'] [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Traceback (most recent call last): [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self.driver.spawn(context, instance, image_meta, [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self._vmops.spawn(context, instance, image_meta, injected_files, [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self._fetch_image_if_missing(context, vi) [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 955.108287] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] image_cache(vi, tmp_image_ds_loc) [ 955.108287] env[61594]: ERROR 
nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] vm_util.copy_virtual_disk( [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] session._wait_for_task(vmdk_copy_task) [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return self.wait_for_task(task_ref) [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return evt.wait() [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] result = hub.switch() [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] return self.greenlet.switch() [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 955.108680] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] self.f(*self.args, **self.kw) [ 955.109111] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 955.109111] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] raise exceptions.translate_fault(task_info.error) [ 955.109111] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 955.109111] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Faults: ['InvalidArgument'] [ 955.109111] env[61594]: ERROR nova.compute.manager [instance: dc31ebf5-889b-438b-9f54-6df807714a38] [ 955.109111] env[61594]: DEBUG nova.compute.utils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 955.110877] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 
tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Build of instance dc31ebf5-889b-438b-9f54-6df807714a38 was re-scheduled: A specified parameter was not correct: fileType [ 955.110877] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 955.111390] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 955.111658] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquiring lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.111858] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Acquired lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.112067] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 955.112451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 20afe9b587d84869a4dde658f3f507c1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.120050] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20afe9b587d84869a4dde658f3f507c1 [ 955.152498] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 955.220862] env[61594]: DEBUG nova.network.neutron [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.221480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 8a9a435fb6de445c81bf01d36290324f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.229726] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8a9a435fb6de445c81bf01d36290324f [ 955.230281] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Releasing lock "refresh_cache-dc31ebf5-889b-438b-9f54-6df807714a38" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.230490] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 955.230670] env[61594]: DEBUG nova.compute.manager [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] [instance: dc31ebf5-889b-438b-9f54-6df807714a38] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 955.232313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg fa31663266d9454eb3a49b22929a0796 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.261424] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg fa31663266d9454eb3a49b22929a0796 [ 955.263710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg 08ba602fc6874a6ca258f8896a95639f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.290946] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 08ba602fc6874a6ca258f8896a95639f [ 955.315131] env[61594]: INFO nova.scheduler.client.report [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Deleted allocations for instance dc31ebf5-889b-438b-9f54-6df807714a38 [ 955.321322] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Expecting reply to msg a7efe6ac738144a684e8602d2da2a522 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 955.334367] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a7efe6ac738144a684e8602d2da2a522 [ 955.334876] env[61594]: DEBUG oslo_concurrency.lockutils [None req-5ffad98d-ee1e-4988-b3fe-4586f9d58332 tempest-ServerShowV247Test-65457029 tempest-ServerShowV247Test-65457029-project-member] Lock "dc31ebf5-889b-438b-9f54-6df807714a38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 121.529s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.076631] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.076936] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.077446] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg f26e72284f4f41f49380ae2267fc932d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.088061] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f26e72284f4f41f49380ae2267fc932d [ 958.088508] env[61594]: DEBUG nova.compute.manager 
[None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 958.090215] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 234277f8f9144f08ae48366f9efb8dac in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.120164] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 234277f8f9144f08ae48366f9efb8dac [ 958.135574] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 958.135935] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 958.137407] env[61594]: INFO nova.compute.claims [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 958.138937] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 152c406bf8854db8b32dec6c51323315 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.171025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 152c406bf8854db8b32dec6c51323315 [ 958.172353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg a4ac47c784904632a2fd2833d84d77a9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.179979] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a4ac47c784904632a2fd2833d84d77a9 [ 958.268744] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8674ee-c6dc-460d-b1d7-ae237349f00d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.276241] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38b9dff-5a3d-46e9-86cd-257fad7e5db9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.305132] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d14a0b3-d895-4c92-abf6-15fd0fc6366e {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.312405] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02d3719-e79a-471d-8665-48ed89c35f03 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.326470] env[61594]: DEBUG nova.compute.provider_tree [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.326940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 381a56c589794519b773a37f077170a5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.353935] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 381a56c589794519b773a37f077170a5 [ 958.355087] env[61594]: DEBUG nova.scheduler.client.report [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 958.357366] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg c3a19ecba9ef4fa4a6e7e95eff83488b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.370094] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c3a19ecba9ef4fa4a6e7e95eff83488b [ 958.370784] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.235s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.371297] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Start building networks asynchronously for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 958.372867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 543809e77a744fc7ac0ed921e0c4717e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.401656] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 543809e77a744fc7ac0ed921e0c4717e [ 958.402926] env[61594]: DEBUG nova.compute.utils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 958.403527] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 3c795884d6ad401087e27e4b92fb48ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.404326] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Not allocating networking since 'none' was specified. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1968}} [ 958.412389] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3c795884d6ad401087e27e4b92fb48ec [ 958.412812] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Start building block device mappings for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 958.414384] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg dfc4197dc988413cb52fc73947e036ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.443108] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dfc4197dc988413cb52fc73947e036ec [ 958.446284] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 6952f1351e7c45c089afa306278d3516 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 958.475025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6952f1351e7c45c089afa306278d3516 [ 958.480313] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 958.514377] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 958.517464] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 958.517464] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.517464] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 958.517464] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.517464] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 958.517713] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 958.517713] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 958.517713] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 
tempest-ServerShowV257Test-578580744-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 958.517713] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 958.517713] env[61594]: DEBUG nova.virt.hardware [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 958.517942] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20343724-b231-4341-838e-e17da9725e57 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.526182] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99793e1c-892f-416a-b5a8-7c0849d3bd19 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.540454] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance VIF info [] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 958.546130] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Creating folder: Project (a2b9019e04914800ae05f83ca5814555). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.546412] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cca2311d-5b3b-4e62-83c9-90497cfab92c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.556518] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Created folder: Project (a2b9019e04914800ae05f83ca5814555) in parent group-v277030. [ 958.556615] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Creating folder: Instances. Parent ref: group-v277069. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 958.556830] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-869e715e-d857-4aca-a49d-7460130f63cc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.565387] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Created folder: Instances in parent group-v277069. 
[ 958.565608] env[61594]: DEBUG oslo.service.loopingcall [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.565786] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 958.565972] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2e1e6b7-da54-477d-8dea-9e0da3367522 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.582056] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 958.582056] env[61594]: value = "task-1291447" [ 958.582056] env[61594]: _type = "Task" [ 958.582056] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.589043] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291447, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.091714] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291447, 'name': CreateVM_Task, 'duration_secs': 0.248192} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.092092] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.092330] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 959.092495] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.092829] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 959.093080] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a198c4b-799f-44cc-81cf-06daade9fbea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.097375] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for the task: (returnval){ [ 959.097375] 
env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]523dff73-e6cb-cf29-7025-eee749aa7a04" [ 959.097375] env[61594]: _type = "Task" [ 959.097375] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.104569] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]523dff73-e6cb-cf29-7025-eee749aa7a04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.607887] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.608110] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 959.608328] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 988.545265] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.540060] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.543675] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 989.543833] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 991.543764] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.543674] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.543919] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.544360] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 2e29ff61869a4839adb8feda776b0795 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 992.562222] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e29ff61869a4839adb8feda776b0795 [ 992.563314] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.563535] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.563715] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.563856] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 992.564963] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e912ed3-8727-448c-9559-b03f6899498c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.573811] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2c9b97-2a56-4c9c-b682-d78147cad7a3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.589015] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a160007-d3b8-4bb5-90f8-0c7e297ddaad {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.595345] env[61594]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286ad4d6-8507-4a09-b3b5-19d0991c2495 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.623495] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181466MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 992.623641] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.623830] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.624660] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg c658d3eb136948d18313a9af6a0f06ed in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 992.655374] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c658d3eb136948d18313a9af6a0f06ed [ 992.658444] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 436c817fbfb349069acd3b958d200b20 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 992.667819] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 436c817fbfb349069acd3b958d200b20 [ 992.688875] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0f6368a9-cadc-46b4-be16-017724580876 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689047] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 23641d1e-bbca-4887-95c6-5a6cac0ce6a1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689184] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689331] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689461] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 1ac59594-4fbc-4a99-9e73-657185d4f218 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689581] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689699] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 992.689875] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 992.690024] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 992.776557] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70055e75-bca4-4c95-a977-32da72bced17 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.784105] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e2a5e0-c667-4163-96f1-a80ef8e946f7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.814086] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2791e32-2c6c-4ce6-85ec-6ad87ee30891 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.820542] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89da3eb-bdbe-408c-bbe5-2af319dfd9b9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.832908] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.833362] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 0021143358104b51be4bacea87319afa in 
queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 992.840230] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0021143358104b51be4bacea87319afa [ 992.841036] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 992.843207] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg be3a455606c747ecb11979fcc2915197 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 992.853617] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg be3a455606c747ecb11979fcc2915197 [ 992.854231] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 992.854405] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.231s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.854266] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 993.854552] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 993.854552] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 993.855168] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 99ae0ac92919405c8ef70b04eac60c46 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 993.869305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 99ae0ac92919405c8ef70b04eac60c46 [ 993.871041] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871203] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871374] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871506] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871632] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871755] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.871881] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 993.872009] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 993.872509] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 995.544374] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1001.499116] env[61594]: WARNING oslo_vmware.rw_handles [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1001.499116] env[61594]: ERROR oslo_vmware.rw_handles [ 1001.499702] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1001.501699] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1001.501984] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Copying Virtual Disk [datastore1] vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] 
vmware_temp/c84dc306-543a-4fa1-b507-94eb4e10ad6a/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1001.502333] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32196437-ad5d-418f-af11-87ae95c15e63 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.510144] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for the task: (returnval){ [ 1001.510144] env[61594]: value = "task-1291448" [ 1001.510144] env[61594]: _type = "Task" [ 1001.510144] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.518239] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Task: {'id': task-1291448, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.021041] env[61594]: DEBUG oslo_vmware.exceptions [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1002.021041] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.021182] env[61594]: ERROR nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.021182] env[61594]: Faults: ['InvalidArgument'] [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] Traceback (most recent call last): [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] yield resources [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self.driver.spawn(context, instance, image_meta, [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 
0f6368a9-cadc-46b4-be16-017724580876] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self._fetch_image_if_missing(context, vi) [ 1002.021182] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] image_cache(vi, tmp_image_ds_loc) [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] vm_util.copy_virtual_disk( [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] session._wait_for_task(vmdk_copy_task) [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return self.wait_for_task(task_ref) [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return evt.wait() [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] result = hub.switch() [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1002.021557] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return self.greenlet.switch() [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self.f(*self.args, **self.kw) [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] raise exceptions.translate_fault(task_info.error) [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] Faults: ['InvalidArgument'] [ 1002.021924] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] [ 1002.021924] env[61594]: INFO nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Terminating instance [ 1002.023095] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.023302] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.023539] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e32aa84-d525-41b6-a7e2-d26a5cc310fe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.025924] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.026105] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquired lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.026286] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1002.026706] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 7169f105fa474b1c87b8f2b38be29cb0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.033052] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.033231] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 
tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1002.033920] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bc5c8d7-201a-4514-95a3-08d870884e11 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.036363] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7169f105fa474b1c87b8f2b38be29cb0 [ 1002.041140] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Waiting for the task: (returnval){ [ 1002.041140] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]527a95c4-8749-858f-fbc4-346f79bbc4d5" [ 1002.041140] env[61594]: _type = "Task" [ 1002.041140] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.048818] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]527a95c4-8749-858f-fbc4-346f79bbc4d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.058371] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1002.119522] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.120035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 11c33e71aa4f461c96dafe3bc1ba4352 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.128954] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 11c33e71aa4f461c96dafe3bc1ba4352 [ 1002.129494] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Releasing lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1002.129875] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1002.130102] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1002.131122] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ab3f13-cb1f-458e-9e8f-714845aa15b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.138739] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1002.138955] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77650267-38a8-44d9-8d70-71c66dc80d96 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.165719] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1002.166040] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1002.166285] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Deleting the datastore file [datastore1] 0f6368a9-cadc-46b4-be16-017724580876 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1002.166535] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6563b9d-d732-473f-b7df-1b8b816dd8e3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.172639] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for the task: (returnval){ [ 1002.172639] env[61594]: value = "task-1291450" [ 1002.172639] env[61594]: _type = "Task" [ 1002.172639] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.180016] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Task: {'id': task-1291450, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.551454] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1002.551822] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Creating directory with path [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.551946] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bebff112-c3ff-419b-9650-3dcd1e5f75c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.567804] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Created directory with path [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.567990] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Fetch image to [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1002.568180] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1002.568894] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808eb81d-36bf-443e-8244-0e085261c28f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.576663] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844c5e3c-3b1b-421c-8d4c-9ba246e00160 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.585316] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb42bd7d-d5d8-4ddb-99d2-2f6087cd195f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.614891] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f46a353-9172-4376-a623-8150a07dac7a {{(pid=61594) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.620102] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1e7c8e2a-27ae-4011-a4c3-65f20e2c099a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.639711] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1002.683381] env[61594]: DEBUG oslo_vmware.api [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Task: {'id': task-1291450, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.043336} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.683635] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.683826] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1002.684015] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1002.684205] env[61594]: INFO nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Took 0.55 seconds to destroy the instance on the hypervisor. [ 1002.684441] env[61594]: DEBUG oslo.service.loopingcall [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1002.684642] env[61594]: DEBUG nova.compute.manager [-] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1002.686671] env[61594]: DEBUG nova.compute.claims [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1002.686849] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.687072] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.688866] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 70fd282a30cb4f1f955587c77566f85a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.691399] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1002.749158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 70fd282a30cb4f1f955587c77566f85a [ 1002.755049] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1002.755049] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1002.835756] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000ad18c-582d-47b1-a67d-71ae3ba17fdf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.843232] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45545025-8734-4e82-a2a5-4b2df962325a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.872801] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa58daeb-616c-4099-9666-993213536ed3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.879731] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb3a461-c7ac-449f-a821-8fd35c678381 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.892499] env[61594]: DEBUG nova.compute.provider_tree [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1002.892940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 0b2e5266537e46ebb8aed3fb0e8d32e0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.901185] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0b2e5266537e46ebb8aed3fb0e8d32e0 [ 1002.902044] env[61594]: DEBUG nova.scheduler.client.report [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1002.904187] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 29e3d137b6ab4466b086e97a2a2ee34a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.914546] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29e3d137b6ab4466b086e97a2a2ee34a [ 1002.915185] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 
0.228s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.915684] env[61594]: ERROR nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.915684] env[61594]: Faults: ['InvalidArgument'] [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] Traceback (most recent call last): [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self.driver.spawn(context, instance, image_meta, [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self._fetch_image_if_missing(context, vi) [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] image_cache(vi, tmp_image_ds_loc) [ 1002.915684] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] vm_util.copy_virtual_disk( [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] session._wait_for_task(vmdk_copy_task) [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return self.wait_for_task(task_ref) [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return evt.wait() [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1002.915983] env[61594]: ERROR 
nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] result = hub.switch() [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] return self.greenlet.switch() [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1002.915983] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] self.f(*self.args, **self.kw) [ 1002.916353] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1002.916353] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] raise exceptions.translate_fault(task_info.error) [ 1002.916353] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1002.916353] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] Faults: ['InvalidArgument'] [ 1002.916353] env[61594]: ERROR nova.compute.manager [instance: 0f6368a9-cadc-46b4-be16-017724580876] [ 1002.916353] env[61594]: DEBUG nova.compute.utils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1002.917830] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Build of instance 0f6368a9-cadc-46b4-be16-017724580876 was re-scheduled: A specified parameter was not correct: fileType [ 1002.917830] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1002.918218] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1002.918436] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquiring lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.918583] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Acquired lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
1002.918745] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1002.919111] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg f543723c044d490eb6ce7e67dfc658e1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1002.924400] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f543723c044d490eb6ce7e67dfc658e1 [ 1002.941736] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1003.175105] env[61594]: DEBUG nova.network.neutron [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.175691] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 309acd79605b420a84c10d8b82eabe2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1003.184071] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 309acd79605b420a84c10d8b82eabe2f [ 1003.184663] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Releasing lock "refresh_cache-0f6368a9-cadc-46b4-be16-017724580876" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.184884] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1003.185392] env[61594]: DEBUG nova.compute.manager [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] [instance: 0f6368a9-cadc-46b4-be16-017724580876] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1003.186791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 3d02a684012e4de0aae4aec1c992480e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1003.217430] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3d02a684012e4de0aae4aec1c992480e [ 1003.220091] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg 1c52f105d09d4f1ba5f383f43c3feaa4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1003.248976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1c52f105d09d4f1ba5f383f43c3feaa4 [ 1003.269102] env[61594]: INFO nova.scheduler.client.report [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Deleted allocations for instance 0f6368a9-cadc-46b4-be16-017724580876 [ 1003.274548] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Expecting reply to msg c7f6b5870aa14e0c81146fd1179b6308 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1003.287911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c7f6b5870aa14e0c81146fd1179b6308 [ 1003.288400] env[61594]: DEBUG oslo_concurrency.lockutils [None req-599e8cd4-097b-4df0-a473-f7cbf2a8b22e tempest-ServersAaction247Test-1005626433 tempest-ServersAaction247Test-1005626433-project-member] Lock "0f6368a9-cadc-46b4-be16-017724580876" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 157.017s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.545524] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1048.643403] env[61594]: WARNING oslo_vmware.rw_handles [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = 
self._read_status() [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1048.643403] env[61594]: ERROR oslo_vmware.rw_handles [ 1048.643403] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1048.645766] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1048.646177] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Copying Virtual Disk [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/bafe7840-01f7-442f-84d8-0988444dd817/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1048.646600] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e3e1cbe4-589c-4fc9-89b9-116ec59bed87 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.655055] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Waiting for the task: (returnval){ [ 1048.655055] env[61594]: value = "task-1291451" [ 1048.655055] env[61594]: _type = "Task" [ 1048.655055] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.663740] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Task: {'id': task-1291451, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.166230] env[61594]: DEBUG oslo_vmware.exceptions [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1049.166230] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.166230] env[61594]: ERROR nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.166230] env[61594]: Faults: ['InvalidArgument'] [ 1049.166230] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Traceback (most recent call last): [ 1049.166230] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1049.166230] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] yield resources [ 1049.166230] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1049.166230] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self.driver.spawn(context, instance, image_meta, [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self._fetch_image_if_missing(context, vi) [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] image_cache(vi, tmp_image_ds_loc) [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] vm_util.copy_virtual_disk( [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] session._wait_for_task(vmdk_copy_task) [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return self.wait_for_task(task_ref) [ 1049.166592] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return evt.wait() [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] result = hub.switch() [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return self.greenlet.switch() [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self.f(*self.args, **self.kw) [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] raise exceptions.translate_fault(task_info.error) [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Faults: ['InvalidArgument'] [ 1049.167153] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] [ 1049.167667] env[61594]: INFO nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Terminating instance [ 1049.167894] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.168079] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.168321] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b60b4e90-dc1c-474b-ae54-334f436aa9b9 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.170591] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1049.170789] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1049.171505] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288d60bf-e9da-4547-ba3c-044386aff443 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.178283] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1049.178492] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5029368-57cb-4110-a48b-f04a45f5ac9d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.180611] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.180788] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1049.181732] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67d9b5dd-ad4a-4850-afb6-fca34c80c9b0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.186620] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1049.186620] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]521f7646-49db-d7c9-03ec-97416e45ba73" [ 1049.186620] env[61594]: _type = "Task" [ 1049.186620] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.202479] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1049.202702] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.202905] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66469a7f-64fc-404e-b72c-abbe1d0c1a3a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.221749] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.221972] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Fetch image to [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1049.222225] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1049.222968] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479d788c-b945-40b0-baf3-957e76622ae8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.229573] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64299dc6-36e6-45b9-85f3-99a93962e432 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.238856] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcad69ed-c63a-491e-be20-899ff68624fd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.270049] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-210d48b3-519d-40cf-85a8-3889e1251b92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.272599] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1049.272801] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1049.272982] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Deleting the datastore file [datastore1] 23641d1e-bbca-4887-95c6-5a6cac0ce6a1 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.273244] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-027fbbca-4587-4c16-8d46-b3d20077f585 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.277904] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-0e66a7c3-a03a-48ad-955e-7c7db9aa3388 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.280518] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Waiting for the task: (returnval){ [ 1049.280518] env[61594]: value = "task-1291453" [ 1049.280518] env[61594]: _type = "Task" [ 1049.280518] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.287765] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Task: {'id': task-1291453, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.308743] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1049.359217] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1049.419388] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1049.419552] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1049.543494] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.543664] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1049.790961] env[61594]: DEBUG oslo_vmware.api [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Task: {'id': task-1291453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.065866} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.791356] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.791450] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1049.791633] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1049.791806] env[61594]: INFO nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1049.793895] env[61594]: DEBUG nova.compute.claims [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1049.794102] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.794347] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.796162] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg daa0479d19574a8f8c5c8f5d2785ae2f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1049.836124] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg daa0479d19574a8f8c5c8f5d2785ae2f [ 1049.918274] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735b262b-ac06-487e-ae6f-166a2ec26286 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.925491] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5def9200-0e50-49f7-b736-c6ba2dc30c64 {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.954519] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e75a229d-5c43-48d1-803f-8442f88e35ea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.961252] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5613430a-4520-4af4-bb21-29eb82d7277d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.974101] env[61594]: DEBUG nova.compute.provider_tree [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.974604] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg a2d74b37d1d54dad9550d110ac276eb0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1049.981767] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a2d74b37d1d54dad9550d110ac276eb0 [ 1049.982657] env[61594]: DEBUG nova.scheduler.client.report [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1049.984817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg b296f31b2b1e448ea644fa81da6df529 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.000249] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b296f31b2b1e448ea644fa81da6df529 [ 1050.000936] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.207s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.001462] env[61594]: ERROR nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1050.001462] env[61594]: Faults: ['InvalidArgument'] [ 1050.001462] env[61594]: ERROR nova.compute.manager 
[instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Traceback (most recent call last): [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self.driver.spawn(context, instance, image_meta, [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self._fetch_image_if_missing(context, vi) [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] image_cache(vi, tmp_image_ds_loc) [ 1050.001462] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] vm_util.copy_virtual_disk( [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] session._wait_for_task(vmdk_copy_task) [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return self.wait_for_task(task_ref) [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return evt.wait() [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] result = hub.switch() [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] return self.greenlet.switch() [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1050.001826] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] self.f(*self.args, **self.kw) [ 1050.002208] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1050.002208] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] raise exceptions.translate_fault(task_info.error) [ 1050.002208] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1050.002208] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Faults: ['InvalidArgument'] [ 1050.002208] env[61594]: ERROR nova.compute.manager [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] [ 1050.002208] env[61594]: DEBUG nova.compute.utils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1050.003757] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Build of instance 23641d1e-bbca-4887-95c6-5a6cac0ce6a1 was re-scheduled: A specified parameter was not correct: fileType [ 1050.003757] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1050.004445] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1050.004445] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1050.004445] env[61594]: DEBUG nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1050.004646] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1050.315177] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg d29a682e4cd1478fa156ff800788dc8f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.323893] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d29a682e4cd1478fa156ff800788dc8f [ 1050.324436] env[61594]: DEBUG nova.network.neutron [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.324895] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 4487359bfe4743fe993af84cd44406da in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.334520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4487359bfe4743fe993af84cd44406da [ 1050.335149] env[61594]: INFO nova.compute.manager [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] [instance: 23641d1e-bbca-4887-95c6-5a6cac0ce6a1] Took 0.33 seconds to deallocate network for instance. 
[ 1050.336689] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 76d5305a8b3245c6965bdc546cb0533e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.369195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d5305a8b3245c6965bdc546cb0533e [ 1050.371671] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 902a3c86ef0f4c908ee15fb3ee02fb0a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.399953] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 902a3c86ef0f4c908ee15fb3ee02fb0a [ 1050.421603] env[61594]: INFO nova.scheduler.client.report [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Deleted allocations for instance 23641d1e-bbca-4887-95c6-5a6cac0ce6a1 [ 1050.427356] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Expecting reply to msg 0d7d5f58f09d42c88d714e9dfdd2448a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1050.441195] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0d7d5f58f09d42c88d714e9dfdd2448a [ 1050.441195] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9236c549-cb11-4864-85fa-a2883f24c113 tempest-ServerRescueTestJSON-513893860 tempest-ServerRescueTestJSON-513893860-project-member] Lock "23641d1e-bbca-4887-95c6-5a6cac0ce6a1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 178.616s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.540053] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.544047] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.544047] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1053.544047] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1053.544595] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 01423af04cbf477d89eeba8f8012c51c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1053.560150] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 01423af04cbf477d89eeba8f8012c51c [ 1053.562020] env[61594]: 
DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1053.562020] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1053.562220] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1053.562333] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1053.562466] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1053.562592] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1053.563068] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.543195] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.543834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 9bbcc6f96c094bbe8bb72682a96720ef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.556558] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9bbcc6f96c094bbe8bb72682a96720ef [ 1054.559186] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.559335] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.559418] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource 
{{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.559710] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 3916a81654694067be2af6f49f00975b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.568943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3916a81654694067be2af6f49f00975b [ 1054.569847] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.570077] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.570249] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.570405] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1054.571457] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cdeb14-749d-404b-98a2-be947d70b0b5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.580089] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8e1e65-b6a7-48e1-9187-92e03b5f1e55 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.595481] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bfdd39a-58f0-4dc0-aa9e-8316b2036f2d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.602084] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143db04-e890-42e3-963f-ed4ae6b19484 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.631153] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181524MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1054.631153] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.631333] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.632016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f52f370e16ce47a69d4bbc8142af00a2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.654181] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f52f370e16ce47a69d4bbc8142af00a2 [ 1054.656641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 4bfb4521c8144280975561071c87aa61 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.665888] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4bfb4521c8144280975561071c87aa61 [ 1054.686134] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.686294] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.686425] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 1ac59594-4fbc-4a99-9e73-657185d4f218 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.686580] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.686704] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1054.686883] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1054.687035] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1054.755709] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-834b6892-ac7f-404e-81fc-002c0ff71b22 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.763444] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7888d0b-b80e-4668-91ae-4e5888c0b788 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.793187] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c81dbd-13d0-4daa-a174-fe47ba02a63f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.800289] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572279e5-3436-4219-8b8f-aaae5e839c92 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.813358] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.813809] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg dd5e8f8d17c94e0c88d3084ca5b0bd3b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.821500] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd5e8f8d17c94e0c88d3084ca5b0bd3b [ 1054.822362] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1054.824624] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg d5b07a430bf84f869e4ef3399dc9ec9d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.837071] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d5b07a430bf84f869e4ef3399dc9ec9d [ 
1054.837666] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1054.837843] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.207s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.886193] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 32a0009e161f40bfac213bcc3dbb5e98 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1054.895293] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32a0009e161f40bfac213bcc3dbb5e98 [ 1056.822839] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1076.647811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg e648e766342c4234968068fde7f2e801 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1076.657847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e648e766342c4234968068fde7f2e801 [ 1076.658406] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "1ac59594-4fbc-4a99-9e73-657185d4f218" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.709682] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg dbcaeed4191a4fe3aacbcd24de8273bd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1076.718940] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dbcaeed4191a4fe3aacbcd24de8273bd [ 1076.719393] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "299afd65-10d4-4602-9a7e-b5d12e88a823" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1076.769998] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 9fd10d55202a4ac290d23a33c5d0c79a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1076.780124] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
9fd10d55202a4ac290d23a33c5d0c79a [ 1076.780551] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1077.848843] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 22d3fe2fad8f48938b447893c742b1c9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1077.858026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 22d3fe2fad8f48938b447893c742b1c9 [ 1077.858228] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "3a9e32f2-4300-4b44-ae16-67792000eb08" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.036372] env[61594]: WARNING oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1095.036372] env[61594]: ERROR oslo_vmware.rw_handles [ 1095.037274] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1095.039071] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None 
req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1095.039323] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Copying Virtual Disk [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/7b1d0a2a-4aa6-4d7f-898b-38c950f1cb93/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1095.039610] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cb7f07e3-cc39-4cdc-88d6-2182d1c7f782 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.047840] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1095.047840] env[61594]: value = "task-1291454" [ 1095.047840] env[61594]: _type = "Task" [ 1095.047840] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.055623] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.558114] env[61594]: DEBUG oslo_vmware.exceptions [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1095.558391] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1095.558929] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1095.558929] env[61594]: Faults: ['InvalidArgument'] [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Traceback (most recent call last): [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] yield resources [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self.driver.spawn(context, instance, image_meta, [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self._fetch_image_if_missing(context, vi) [ 1095.558929] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] image_cache(vi, tmp_image_ds_loc) [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] vm_util.copy_virtual_disk( [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] session._wait_for_task(vmdk_copy_task) [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return self.wait_for_task(task_ref) [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return evt.wait() [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] result = hub.switch() [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1095.559407] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return self.greenlet.switch() [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self.f(*self.args, **self.kw) [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] raise exceptions.translate_fault(task_info.error) [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Faults: ['InvalidArgument'] [ 1095.559757] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] [ 1095.559757] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Terminating instance [ 1095.560858] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.561089] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1095.561330] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d887fbd4-2b4a-4ba5-bd71-fe5522a21adb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.563616] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1095.563809] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1095.564524] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2414f7-5a0d-4aa1-9984-da6dc92ccfe8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.571190] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1095.571391] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-254e652c-b198-4649-b6c0-6fc8fa033f0a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.457358] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1096.457643] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1096.457767] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleting the datastore file [datastore1] 1ac59594-4fbc-4a99-9e73-657185d4f218 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.458063] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-441f125e-afbf-4960-b67a-15fdbca01eb5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.464134] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1096.464134] env[61594]: value = "task-1291456" [ 1096.464134] env[61594]: _type = "Task" [ 
1096.464134] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.471559] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.974253] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.475136] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.975804] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.476618] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.977551] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.019241] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.019403] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1099.020148] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b66df99-0e80-4351-bffd-5b25fdb12d5c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.025332] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1099.025332] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]522d74fc-c689-e217-f934-bdf4d619b962" [ 1099.025332] env[61594]: _type = "Task" [ 1099.025332] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.033963] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]522d74fc-c689-e217-f934-bdf4d619b962, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.482079] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 2.859346} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.482480] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1099.482533] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1099.482803] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1099.483108] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Took 3.92 seconds to destroy the instance on the hypervisor. 
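The repeated "Task: {'id': task-1291456, 'name': DeleteDatastoreFile_Task} progress is 0%" entries above are oslo.vmware polling the vCenter task until it reaches a terminal state (the task completes just above with 'duration_secs': 2.859346). A minimal sketch of that poll-until-done pattern, assuming a hypothetical get_task_info helper rather than oslo.vmware's real internals, is:

# Illustrative sketch only: mirrors the "Waiting for the task ... to complete",
# "progress is N%" and "completed successfully" sequence logged above.
# get_task_info is a hypothetical callable returning a vSphere-TaskInfo-like object.
import time

def poll_vcenter_task(get_task_info, interval=0.5):
    while True:
        info = get_task_info()            # object with .state, .progress, .error, .result
        if info.state in ('queued', 'running'):
            print(f"progress is {info.progress or 0}%")
        elif info.state == 'success':
            print("completed successfully")
            return info.result
        else:                             # error state; oslo.vmware translates this into a VimFaultException
            raise RuntimeError(info.error)
        time.sleep(interval)              # the real code polls via a looping call rather than a bare sleep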
[ 1099.486080] env[61594]: DEBUG nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1099.486378] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.486712] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1099.489700] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg d32eecd46e7e452dafdca8b24e2a7951 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1099.535756] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1099.536021] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.536269] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2965faf6-915d-4aaf-a011-cbd4d7287b60 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.538882] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d32eecd46e7e452dafdca8b24e2a7951 [ 1099.548969] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.549215] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Fetch image to [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1099.549394] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1099.550386] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01209829-0e3f-4f27-91c3-705ab4a494ea {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.557089] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3593c2b4-4220-4d0d-a576-94ca48f7b22e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.568339] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e2fc32c-77d8-4d50-a983-5930df041ada {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.602416] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4392e389-6b4c-4923-93eb-0a401c1ee4f2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.608745] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-39807a61-06ef-435f-80fc-ba69ff96bfef {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.630702] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1099.651193] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04af6eaf-2824-40ef-bed8-2a192e277693 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.660836] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33257d3-c529-4c7f-9b69-dfc835654c55 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.692528] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1099.694443] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6469f710-5e4f-420f-a01e-c45222e2be36 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.757586] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27281993-3944-4a64-9020-8a9a5bf750fb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.761982] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1099.762171] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1099.772477] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.772861] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 0108d1191dab45a9b330b3ca51182ddd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1099.781057] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0108d1191dab45a9b330b3ca51182ddd [ 1099.782251] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1099.784641] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 1baee690a3fa4e7c80ce1673733c3509 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1099.797026] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1baee690a3fa4e7c80ce1673733c3509 [ 1099.797571] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.311s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1099.798125] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1099.798125] env[61594]: Faults: ['InvalidArgument'] [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Traceback (most recent call last): [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self.driver.spawn(context, instance, image_meta, [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self._fetch_image_if_missing(context, vi) [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] image_cache(vi, tmp_image_ds_loc) [ 1099.798125] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] vm_util.copy_virtual_disk( [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] session._wait_for_task(vmdk_copy_task) [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return self.wait_for_task(task_ref) [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return evt.wait() [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] result = hub.switch() [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] return self.greenlet.switch() [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1099.798434] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] self.f(*self.args, **self.kw) [ 1099.798996] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1099.798996] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] raise exceptions.translate_fault(task_info.error) [ 1099.798996] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1099.798996] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Faults: ['InvalidArgument'] [ 1099.798996] env[61594]: ERROR nova.compute.manager [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] [ 1099.798996] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1099.800440] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Build of instance 1ac59594-4fbc-4a99-9e73-657185d4f218 was re-scheduled: A specified parameter was not correct: fileType [ 1099.800440] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1099.800814] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1099.800987] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible 
determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1099.801194] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1099.801372] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1100.155874] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 6b738db59fd64f50a9ff429fb770b8ad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.168465] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6b738db59fd64f50a9ff429fb770b8ad [ 1100.169073] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.169482] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 6d743d5c4e6b47ffb5eab09d376fd35b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.179775] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6d743d5c4e6b47ffb5eab09d376fd35b [ 1100.180517] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Took 0.38 seconds to deallocate network for instance. 
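The resource-tracker entries earlier in this section report five active instances, each holding {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1} in placement, an inventory with 512 MB of reserved memory, and a final view of used_ram=1152MB, used_disk=5GB, used_vcpus=5. A rough reconstruction of that arithmetic (an illustration, not Nova's actual resource-tracker code) follows; the capacity line assumes the standard placement formula (total - reserved) * allocation_ratio.

# Illustration only: how the usage figures logged by the resource tracker fit together.
RESERVED_HOST_MEMORY_MB = 512                                 # 'reserved': 512 in the MEMORY_MB inventory above
NUM_INSTANCES = 5                                             # the five instances listed in this audit
PER_INSTANCE = {'MEMORY_MB': 128, 'VCPU': 1, 'DISK_GB': 1}    # allocation logged for each instance

used_ram_mb = RESERVED_HOST_MEMORY_MB + NUM_INSTANCES * PER_INSTANCE['MEMORY_MB']   # 512 + 5*128 = 1152
used_vcpus = NUM_INSTANCES * PER_INSTANCE['VCPU']                                   # 5
used_disk_gb = NUM_INSTANCES * PER_INSTANCE['DISK_GB']                              # 5

# Assumed placement capacity formula applied to the VCPU inventory logged above:
# (total - reserved) * allocation_ratio -> (48 - 0) * 4.0 = 192 schedulable VCPUs.
vcpu_capacity = (48 - 0) * 4.0

print(used_ram_mb, used_vcpus, used_disk_gb, vcpu_capacity)   # 1152 5 5 192.0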
[ 1100.181976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 9a85cd2c52414b2b9b22fbcbb75df695 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.215137] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9a85cd2c52414b2b9b22fbcbb75df695 [ 1100.217659] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 402180e1efd744e5943e86566c81f693 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.248066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 402180e1efd744e5943e86566c81f693 [ 1100.269910] env[61594]: INFO nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted allocations for instance 1ac59594-4fbc-4a99-9e73-657185d4f218 [ 1100.275832] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg e2e2bd1dd82d4d39ab718dda01e18056 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.290009] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2e2bd1dd82d4d39ab718dda01e18056 [ 1100.290518] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.108s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.291204] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 23.632s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.291204] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1100.291204] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1100.291464] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.293345] env[61594]: INFO nova.compute.manager [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Terminating instance [ 1100.295046] env[61594]: DEBUG nova.compute.manager [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1100.295245] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1100.295714] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6446e57e-68a7-4d06-9ddd-81c43c5eaca9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.306838] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06965f9-354c-437f-80b0-cde2e099f3f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.329780] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1ac59594-4fbc-4a99-9e73-657185d4f218 could not be found. [ 1100.329977] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1100.330164] env[61594]: INFO nova.compute.manager [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1100.330399] env[61594]: DEBUG oslo.service.loopingcall [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1100.330604] env[61594]: DEBUG nova.compute.manager [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1100.330696] env[61594]: DEBUG nova.network.neutron [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1100.347823] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 3f904ae5e58741f6afed6903464b48ec in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.354931] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3f904ae5e58741f6afed6903464b48ec [ 1100.355291] env[61594]: DEBUG nova.network.neutron [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.355673] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9402d6bc414e403680485c1ee5b2ba56 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.364398] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9402d6bc414e403680485c1ee5b2ba56 [ 1100.364861] env[61594]: INFO nova.compute.manager [-] [instance: 1ac59594-4fbc-4a99-9e73-657185d4f218] Took 0.03 seconds to deallocate network for instance. [ 1100.368394] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f2564a88615c4e98bda3e7672b372dd4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.395047] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f2564a88615c4e98bda3e7672b372dd4 [ 1100.407947] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg c5ef81706e7c48eb9a006ac916096eff in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.446642] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c5ef81706e7c48eb9a006ac916096eff [ 1100.449552] env[61594]: DEBUG oslo_concurrency.lockutils [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "1ac59594-4fbc-4a99-9e73-657185d4f218" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.159s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1100.449867] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-2e8d2349-ed12-4349-9613-e5640cb8295d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f6b69b040843465481388754b317965b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1100.465054] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f6b69b040843465481388754b317965b [ 1110.543586] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running 
periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.539252] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.543948] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1111.544313] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1113.545820] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.544278] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.544356] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.544809] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1115.544809] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1115.545377] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f223e79e43654dfa957fb7e309901cfb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.568118] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f223e79e43654dfa957fb7e309901cfb [ 1115.569664] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1115.569664] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Skipping network cache update for instance because it is Building. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1115.569861] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1115.569903] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1115.570039] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1115.570513] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.570699] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.571026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 7c03d9ccb6594d2a889767ff1c214842 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.579650] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7c03d9ccb6594d2a889767ff1c214842 [ 1115.580504] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.580710] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.580888] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.581051] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1115.582108] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d1f2ca-6b80-44fa-882a-574ad8db5d2c {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.590761] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc91c8b1-05bf-408a-a855-eb8a33ff498a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.604111] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75713e0d-5988-4efe-a686-b6766811080d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.609949] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8962ce-f4f2-4b83-82ca-3079fa85316f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.638462] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181526MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1115.638596] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1115.638781] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1115.639541] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 745bfa068c2b4b7dae387f619ab706de in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.658768] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 745bfa068c2b4b7dae387f619ab706de [ 1115.660769] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 10e99826d5a94837862526cbf9af1fa4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.669544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 10e99826d5a94837862526cbf9af1fa4 [ 1115.687760] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1115.687916] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1115.688058] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1115.688183] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1115.688357] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1115.688493] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1115.746111] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d3fc10-c05f-41a7-85b4-ca3e7b51e931 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.753764] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2ef0a1-e502-4f6b-99ff-b4bf24c95be7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.782959] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff2c87e-1b80-4870-b1c3-e092e7a0490d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.789955] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eb5f42-3c72-4ebc-8dc2-a7a4516cc93f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.802547] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.802963] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg bc8f07cffd894d8e970659321fe54f6e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.810532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bc8f07cffd894d8e970659321fe54f6e [ 1115.811381] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1115.813520] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg e9fdfb3a92f44d6daa50d1189c81eb01 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1115.824225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9fdfb3a92f44d6daa50d1189c81eb01 [ 1115.824844] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1115.825028] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.186s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.798714] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1148.942765] env[61594]: WARNING oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1148.942765] env[61594]: ERROR oslo_vmware.rw_handles [ 1148.943475] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 
cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1148.945661] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1148.945928] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Copying Virtual Disk [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/78e6acc1-70ef-42de-a74a-1af7730d97fc/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1148.946264] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b3c5cf2-99d6-4e87-869c-7045c860cebb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.956440] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1148.956440] env[61594]: value = "task-1291457" [ 1148.956440] env[61594]: _type = "Task" [ 1148.956440] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.964005] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291457, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.468502] env[61594]: DEBUG oslo_vmware.exceptions [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1149.468795] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1149.469380] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1149.469380] env[61594]: Faults: ['InvalidArgument'] [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Traceback (most recent call last): [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] yield resources [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self.driver.spawn(context, instance, image_meta, [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self._fetch_image_if_missing(context, vi) [ 1149.469380] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] image_cache(vi, tmp_image_ds_loc) [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] vm_util.copy_virtual_disk( [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] session._wait_for_task(vmdk_copy_task) [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] return self.wait_for_task(task_ref) [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] return evt.wait() [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] result = hub.switch() [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1149.469807] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] return self.greenlet.switch() [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self.f(*self.args, **self.kw) [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] raise exceptions.translate_fault(task_info.error) [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Faults: ['InvalidArgument'] [ 1149.470378] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] [ 1149.470378] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Terminating instance [ 1149.471280] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.471491] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1149.471735] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0ae324d6-a8f2-44c9-93a7-feb9184fdd73 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.473908] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1149.474112] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1149.474853] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2dc6ad-ad09-4cc7-b408-7e057634ed5b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.482150] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1149.482406] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73a0f7fa-04fd-44b6-a422-d187b3aa0303 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.484714] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1149.484882] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1149.485875] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26815524-6b8b-4974-8ed8-e356a885a0e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.490506] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1149.490506] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52c4f7af-623e-b1bd-ddb2-43a0f13676c8" [ 1149.490506] env[61594]: _type = "Task" [ 1149.490506] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.497730] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52c4f7af-623e-b1bd-ddb2-43a0f13676c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.708858] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1149.709105] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1149.709290] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleting the datastore file [datastore1] cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.709669] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-534e278c-4f4e-47b4-ae10-f71cc4e77ab9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.717449] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1149.717449] env[61594]: value = "task-1291459" [ 1149.717449] env[61594]: _type = "Task" [ 1149.717449] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.726065] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291459, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.000581] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1150.001037] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating directory with path [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1150.001138] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a668d38-65ab-45f6-af73-e315c4c4a324 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.012052] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Created directory with path [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1150.012265] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Fetch image to [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1150.012454] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1150.013323] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49a09231-37ed-4ee0-a390-520798e16700 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.019635] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f1b9352-3355-4b77-98df-719002a385f5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.028357] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c5c8fd-b5a0-4140-8e4b-09af925a6770 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.058189] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-25289251-2daf-4e8f-bc15-d06b93b1ea5a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.063411] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8fad9c57-5204-4dd3-be90-093c86900e9c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.081855] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1150.132215] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1150.192611] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1150.192814] env[61594]: DEBUG oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1150.227534] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.080192} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.227770] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.227949] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1150.228138] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.228315] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Took 0.75 seconds to destroy the instance on the hypervisor. [ 1150.230403] env[61594]: DEBUG nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1150.230586] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.230797] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.232695] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 5aad9711049c475f88eb9c8fe5f3e885 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.271807] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5aad9711049c475f88eb9c8fe5f3e885 [ 1150.328556] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e86a42-9269-4709-9d1d-4c8e59f693d8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.335620] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8b631b30-3089-424b-a10a-61440f4f9449 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.364625] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9732ba24-cd4b-43e2-b060-16b56967e126 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.371245] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5160ea2-befb-451b-a4b0-138c2c61418f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.385216] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1150.385735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 6ca2cd0a6e2a4ffb9ff95c78fdd0d676 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.394075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6ca2cd0a6e2a4ffb9ff95c78fdd0d676 [ 1150.394945] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1150.397112] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 88a5ea66b24a41cbaa076a4359846af6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.407313] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 88a5ea66b24a41cbaa076a4359846af6 [ 1150.407948] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.177s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.408478] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Failed to build and run instance: 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1150.408478] env[61594]: Faults: ['InvalidArgument'] [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Traceback (most recent call last): [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self.driver.spawn(context, instance, image_meta, [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self._fetch_image_if_missing(context, vi) [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] image_cache(vi, tmp_image_ds_loc) [ 1150.408478] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] vm_util.copy_virtual_disk( [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] session._wait_for_task(vmdk_copy_task) [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] return self.wait_for_task(task_ref) [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] return evt.wait() [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] result = hub.switch() [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] 
return self.greenlet.switch() [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1150.408802] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] self.f(*self.args, **self.kw) [ 1150.409121] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1150.409121] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] raise exceptions.translate_fault(task_info.error) [ 1150.409121] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1150.409121] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Faults: ['InvalidArgument'] [ 1150.409121] env[61594]: ERROR nova.compute.manager [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] [ 1150.409333] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1150.410512] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Build of instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 was re-scheduled: A specified parameter was not correct: fileType [ 1150.410512] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1150.410888] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1150.411074] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1150.411249] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1150.411413] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.658589] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 7141b82bba824432a91f13f40df025ce in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.671233] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7141b82bba824432a91f13f40df025ce [ 1150.671233] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.671379] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg b937178b49144d8eadb78dba59ad429d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.685014] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b937178b49144d8eadb78dba59ad429d [ 1150.685014] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Took 0.27 seconds to deallocate network for instance. 
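Editor's note on the failure traced above: the traceback walks the standard oslo.vmware call path — nova's vm_util.copy_virtual_disk invokes VirtualDiskManager.CopyVirtualDisk_Task through the session, session.wait_for_task polls it, and _poll_task raises translate_fault(task_info.error), surfacing the VimFaultException with Faults: ['InvalidArgument']. The following sketch is illustrative only; the vCenter host, credentials and datastore paths are placeholders, and a real call would also pass datacenter references and a destination disk spec.

# Illustrative sketch of the copy_virtual_disk -> wait_for_task path seen in the
# traceback above. Connection details and datastore paths are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

# host, username, password, api_retry_count, task_poll_interval (placeholders)
session = vmware_api.VMwareAPISession('vcenter.example.test', 'user', 'secret', 10, 0.5)

disk_mgr = session.vim.service_content.virtualDiskManager
try:
    # The same SOAP call the log records as
    # "Invoking VirtualDiskManager.CopyVirtualDisk_Task".
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] vmware_temp/<tmpdir>/<image>/tmp-sparse.vmdk',
        destName='[datastore1] vmware_temp/<tmpdir>/<image>/<image>.vmdk')
    # wait_for_task polls the task; on a backend error _poll_task raises
    # translate_fault(task_info.error), exactly as in the traceback above.
    session.wait_for_task(task)
except vmware_exc.VimFaultException as exc:
    # For the failure above, exc.fault_list == ['InvalidArgument'] and the
    # message reads "A specified parameter was not correct: fileType".
    print(exc.fault_list, str(exc))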
[ 1150.686648] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 9403ad188c6342928d2b9f929cbaf14d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.717593] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9403ad188c6342928d2b9f929cbaf14d [ 1150.720225] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 37c214cc9667479c8fd511828d37c7fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.751431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 37c214cc9667479c8fd511828d37c7fe [ 1150.780330] env[61594]: INFO nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted allocations for instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 [ 1150.786239] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg cd32bf0cd3624ce883b73d7e14880d5e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.802082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cd32bf0cd3624ce883b73d7e14880d5e [ 1150.803108] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 270.769s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.803108] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 74.022s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.803428] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.803428] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1150.803570] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.805904] env[61594]: INFO nova.compute.manager [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Terminating instance [ 1150.807698] env[61594]: DEBUG nova.compute.manager [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1150.808312] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1150.808808] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91142aaf-7322-408d-900f-37f3f0806ae2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.819098] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5941330-a98c-4f5a-bb09-5c510a99a41d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.841550] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cbad69d3-7908-4f15-94c5-1b2c2f4e79f1 could not be found. [ 1150.841800] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1150.841983] env[61594]: INFO nova.compute.manager [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 1150.842239] env[61594]: DEBUG oslo.service.loopingcall [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1150.842445] env[61594]: DEBUG nova.compute.manager [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1150.842542] env[61594]: DEBUG nova.network.neutron [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1150.864826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg bbc4ec6014bf41efbeada645fccde959 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.870436] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bbc4ec6014bf41efbeada645fccde959 [ 1150.870806] env[61594]: DEBUG nova.network.neutron [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.871188] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg e6992b3581d34898a9e8d534bb1a4af6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.878925] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e6992b3581d34898a9e8d534bb1a4af6 [ 1150.879409] env[61594]: INFO nova.compute.manager [-] [instance: cbad69d3-7908-4f15-94c5-1b2c2f4e79f1] Took 0.04 seconds to deallocate network for instance. [ 1150.883310] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 5df84a7ab1b14bf2bbdd548cd30c8323 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.909383] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5df84a7ab1b14bf2bbdd548cd30c8323 [ 1150.923581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg f3ee99d90b4d47299d2b7099aa7029ee in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.961545] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f3ee99d90b4d47299d2b7099aa7029ee [ 1150.964655] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "cbad69d3-7908-4f15-94c5-1b2c2f4e79f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.162s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.964970] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9cd7c250-6f87-4aba-aa25-c57779d2661d tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 2ec67210321b4944b0477616efa4daf9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1150.975284] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2ec67210321b4944b0477616efa4daf9 [ 1153.991625] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 
tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg d24c5874d3c241c1aea0f95b37b15744 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1154.001035] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d24c5874d3c241c1aea0f95b37b15744 [ 1154.001485] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1167.544147] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.544432] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Cleaning up deleted instances {{(pid=61594) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11219}} [ 1167.545016] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 85dad891859241859c4dae8a105b8cd4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1167.557228] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 85dad891859241859c4dae8a105b8cd4 [ 1167.557765] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] There are 0 instances to clean {{(pid=61594) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1167.557968] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.558118] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Cleaning up deleted instances with incomplete migration {{(pid=61594) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11257}} [ 1167.558390] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 2c54dcd057f14395a051259490a5687c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1167.567427] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2c54dcd057f14395a051259490a5687c [ 1170.544206] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.544571] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.544799] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to 
msg e9817db7823b471caa1e768233019b49 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1170.552535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9817db7823b471caa1e768233019b49 [ 1171.548513] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.544499] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.544689] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1174.544109] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.544532] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.887184] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 9ce892e2eec14628ad007625dbc60dfb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1174.898884] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9ce892e2eec14628ad007625dbc60dfb [ 1176.540502] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.541155] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg c326da30eef04027a13960396e348b36 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1176.552385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c326da30eef04027a13960396e348b36 [ 1176.554289] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.554476] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.543914] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.544346] 
env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1177.544346] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1177.544877] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 1089a8388de746e7b511311de88a7040 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.555581] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1089a8388de746e7b511311de88a7040 [ 1177.556724] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1177.556874] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1177.557013] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1177.557152] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1177.557638] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.557941] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 9b90e67826444e63aa8ad5fba8cff0a0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.567516] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9b90e67826444e63aa8ad5fba8cff0a0 [ 1177.568340] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.568548] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.568712] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1177.568863] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1177.569902] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab74ec1-c4d4-4f84-b257-4af120ea0eda {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.578430] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9af2558-aebe-4099-9ee5-543b8f8ba51b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.592169] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d4ad0b-89e9-4452-84c7-8f03ca466965 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.597974] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcaaf794-944d-4988-b5f0-0aaabda9a079 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.626560] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181516MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1177.626560] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1177.626560] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1177.627373] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ebdeeaa239974a3e83877ebe637b985a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.645531] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ebdeeaa239974a3e83877ebe637b985a [ 1177.647323] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg ae19fa2e5784454baa413e42cc41a3d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.655242] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ae19fa2e5784454baa413e42cc41a3d5 [ 1177.753466] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.753616] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.753756] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1177.753912] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1177.754071] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1177.770433] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Refreshing inventories for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1177.783877] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Updating ProviderTree inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1177.784098] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Updating inventory in ProviderTree for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1177.794665] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Refreshing aggregate associations for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be, aggregates: None {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1177.811842] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Refreshing trait associations for resource provider f0ff3a26-85e8-47dd-b241-86a582e8d4be, traits: COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE {{(pid=61594) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1177.858380] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-267a6b11-091d-40ee-ab0f-26fbd6a524e8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.866374] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-282fa885-9740-44f5-8879-02ed441983c3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.896354] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e70bab1d-fe6e-40ff-b1f1-d688e81918bc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.903581] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4915fc0c-83fc-41dc-9a66-ab267d1083a9 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.917307] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.917736] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 249a3808a5e647bda8c4b108b1b259af in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.925169] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 249a3808a5e647bda8c4b108b1b259af [ 1177.925991] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1177.928173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f080fcaeddbe45399153b10bf1bf2b37 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1177.939491] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f080fcaeddbe45399153b10bf1bf2b37 [ 1177.940118] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1177.940300] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.314s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1200.072522] env[61594]: WARNING oslo_vmware.rw_handles [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1200.072522] env[61594]: ERROR oslo_vmware.rw_handles [ 1200.073156] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1200.075519] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1200.075774] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Copying Virtual Disk [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/77cf8ac0-68fd-4f08-8c9d-cf9245d9d6e5/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1200.076119] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82034944-a113-49a8-ac97-dd3120866279 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.083973] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1200.083973] env[61594]: value = "task-1291460" [ 1200.083973] env[61594]: _type = "Task" [ 1200.083973] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.091534] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291460, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.594276] env[61594]: DEBUG oslo_vmware.exceptions [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1200.594575] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1200.595131] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1200.595131] env[61594]: Faults: ['InvalidArgument'] [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Traceback (most recent call last): [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] yield resources [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self.driver.spawn(context, instance, image_meta, [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self._fetch_image_if_missing(context, vi) [ 1200.595131] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] image_cache(vi, tmp_image_ds_loc) [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] vm_util.copy_virtual_disk( [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] session._wait_for_task(vmdk_copy_task) [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return self.wait_for_task(task_ref) [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return evt.wait() [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] result = hub.switch() [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1200.595572] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return self.greenlet.switch() [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self.f(*self.args, **self.kw) [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] raise exceptions.translate_fault(task_info.error) [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Faults: ['InvalidArgument'] [ 1200.595940] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] [ 1200.595940] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Terminating instance [ 1200.597018] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.597223] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 
tempest-ServerActionsTestOtherA-627368268-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1200.597454] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f4d6e743-1a9d-48a4-8ad2-449f16552e95 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.599525] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1200.599721] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1200.600424] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba18999b-1d19-4264-85ba-ef33312c70f7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.607013] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1200.607228] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9f637cd-b6be-4b09-9c91-c0d54f9e06dd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.609186] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1200.609361] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1200.610268] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8e8b973-1a08-4050-a8a4-0121bcacdde5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.614603] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for the task: (returnval){ [ 1200.614603] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]526c6980-1795-9c83-3b16-7fcda8c44cd7" [ 1200.614603] env[61594]: _type = "Task" [ 1200.614603] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.625726] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]526c6980-1795-9c83-3b16-7fcda8c44cd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.687111] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1200.687336] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1200.687518] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleting the datastore file [datastore1] 299afd65-10d4-4602-9a7e-b5d12e88a823 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1200.687820] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da33c18a-87fd-48a0-8948-b234d8ff603b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.693150] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for the task: (returnval){ [ 1200.693150] env[61594]: value = "task-1291462" [ 1200.693150] env[61594]: _type = "Task" [ 1200.693150] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.701547] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.126049] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1201.126049] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Creating directory with path [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.126049] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b223038-755b-4b27-8759-3ae4abe6ba22 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.785473] env[61594]: DEBUG oslo_vmware.api [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Task: {'id': task-1291462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074288} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.785726] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1201.785947] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1201.786113] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1201.786309] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Took 1.19 seconds to destroy the instance on the hypervisor. 
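Editor's note on the terminate path logged above (SearchIndex/PropertyCollector lookups, UnregisterVM, FileManager.DeleteDatastoreFile_Task, then waiting on task-1291462 until it reports completed with duration_secs): it follows the usual oslo.vmware sequence. A minimal, hedged sketch of that sequence follows; the session, vm_ref, dc_ref and datastore path are assumed to have been obtained already.

# Hedged sketch of the destroy sequence in the log: unregister the VM, delete
# its datastore directory, and wait for the DeleteDatastoreFile_Task to finish.
from oslo_vmware import exceptions as vmware_exc

def destroy_on_datastore(session, vm_ref, dc_ref, ds_path):
    # "Unregistering the VM" / "Unregistered the VM"
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # "Deleting the datastore file [datastore1] <instance uuid>"
    file_mgr = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_mgr,
        name=ds_path,          # e.g. '[datastore1] 299afd65-10d4-4602-9a7e-b5d12e88a823'
        datacenter=dc_ref)
    try:
        # Corresponds to "Waiting for the task: ... task-1291462" and the later
        # "completed successfully" record that reports duration_secs.
        session.wait_for_task(task)
    except vmware_exc.FileNotFoundException:
        # Cleanup tolerates an already-missing file or directory.
        pass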
[ 1201.788400] env[61594]: DEBUG nova.compute.claims [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1201.788567] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1201.788777] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1201.790603] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg dd640ded6d034f7ab42bb0770ba5ad2e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1201.792333] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Created directory with path [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.792517] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Fetch image to [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1201.792685] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1201.793567] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec054b0f-86c3-4686-8b89-4c03359b5cbe {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.799892] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2588adaa-5536-4945-9851-6c5dddb7ce81 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.808474] env[61594]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a99993f6-e2e0-4461-8e49-60e6cb4e7143 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.839224] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49724bbb-b926-49e3-b011-650ba1e9cb65 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.841729] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg dd640ded6d034f7ab42bb0770ba5ad2e [ 1201.848245] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-981be71f-7a2a-4e2e-be20-4fca21a5bfe2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.868369] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1201.892279] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c13f350-106c-407f-84e8-55930ebbc36b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.902254] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b8440b5-629e-402e-a539-4082490b83b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.933215] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bedcceb-c363-4614-8ddd-f12f48a7129e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.940144] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19b343b4-59c9-49d5-aba5-d381379346d3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.952492] env[61594]: DEBUG nova.compute.provider_tree [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1201.952972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 36d23236f8d644a2989f5d1ccf191cf8 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1201.954581] env[61594]: DEBUG oslo_vmware.rw_handles [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1202.010452] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 36d23236f8d644a2989f5d1ccf191cf8 [ 1202.011413] env[61594]: DEBUG nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1202.013667] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 868cb9133f3d4f429673bb5bb82d656a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.016738] env[61594]: DEBUG oslo_vmware.rw_handles [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1202.016738] env[61594]: DEBUG oslo_vmware.rw_handles [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1202.025567] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 868cb9133f3d4f429673bb5bb82d656a [ 1202.026488] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.238s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.027103] env[61594]: ERROR nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1202.027103] env[61594]: Faults: ['InvalidArgument'] [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Traceback (most recent call last): [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self.driver.spawn(context, instance, image_meta, [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self._fetch_image_if_missing(context, vi) [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] image_cache(vi, tmp_image_ds_loc) [ 1202.027103] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] vm_util.copy_virtual_disk( [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] session._wait_for_task(vmdk_copy_task) [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return 
self.wait_for_task(task_ref) [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return evt.wait() [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] result = hub.switch() [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] return self.greenlet.switch() [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1202.027432] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] self.f(*self.args, **self.kw) [ 1202.027747] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1202.027747] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] raise exceptions.translate_fault(task_info.error) [ 1202.027747] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1202.027747] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Faults: ['InvalidArgument'] [ 1202.027747] env[61594]: ERROR nova.compute.manager [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] [ 1202.027871] env[61594]: DEBUG nova.compute.utils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1202.029519] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Build of instance 299afd65-10d4-4602-9a7e-b5d12e88a823 was re-scheduled: A specified parameter was not correct: fileType [ 1202.029519] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1202.029948] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1202.030175] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 
tempest-ListServersNegativeTestJSON-267653004-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1202.030383] env[61594]: DEBUG nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1202.030580] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1202.382281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 3a5935d221884a28b6560f438af10c65 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.391255] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 3a5935d221884a28b6560f438af10c65 [ 1202.391829] env[61594]: DEBUG nova.network.neutron [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.392535] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 2749c226ba57430b99c0039d7b9aadd4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.403723] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2749c226ba57430b99c0039d7b9aadd4 [ 1202.404332] env[61594]: INFO nova.compute.manager [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Took 0.37 seconds to deallocate network for instance. 
[ 1202.406023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 44250c94c5d04eba98c726378cb3e234 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.441026] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44250c94c5d04eba98c726378cb3e234 [ 1202.443564] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg b741205a32bb49f3921a4d7c5cd053c4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.476024] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b741205a32bb49f3921a4d7c5cd053c4 [ 1202.497054] env[61594]: INFO nova.scheduler.client.report [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Deleted allocations for instance 299afd65-10d4-4602-9a7e-b5d12e88a823 [ 1202.505403] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 87385e03277245428c7f8453c63d1570 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.523218] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 87385e03277245428c7f8453c63d1570 [ 1202.523880] env[61594]: DEBUG oslo_concurrency.lockutils [None req-4917383e-606f-43c5-bef6-828c7aa83d6f tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 322.440s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.524169] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 125.805s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.524429] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Acquiring lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1202.524644] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1202.524842] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.527140] env[61594]: INFO nova.compute.manager [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Terminating instance [ 1202.529495] env[61594]: DEBUG nova.compute.manager [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1202.530226] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1202.530811] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ade02dd-6585-45f1-8ca5-f50639814b5e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.540658] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b30a9d-9cef-4ad5-923e-a92508bf0b40 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.564333] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 299afd65-10d4-4602-9a7e-b5d12e88a823 could not be found. [ 1202.564552] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1202.564733] env[61594]: INFO nova.compute.manager [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1202.564972] env[61594]: DEBUG oslo.service.loopingcall [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1202.565186] env[61594]: DEBUG nova.compute.manager [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1202.565282] env[61594]: DEBUG nova.network.neutron [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1202.583148] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 5d68c15b86c44a99b0388009bdb61151 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.588924] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5d68c15b86c44a99b0388009bdb61151 [ 1202.589250] env[61594]: DEBUG nova.network.neutron [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.589612] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 44025eb890e94ed4be082a68a6a085dd in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.597860] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 44025eb890e94ed4be082a68a6a085dd [ 1202.598307] env[61594]: INFO nova.compute.manager [-] [instance: 299afd65-10d4-4602-9a7e-b5d12e88a823] Took 0.03 seconds to deallocate network for instance. [ 1202.601744] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg da7338d5a54d4b698f6efd1d991c8b29 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.625772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg da7338d5a54d4b698f6efd1d991c8b29 [ 1202.639445] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 476a4708218248debfdbd981dd7c8906 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.676929] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 476a4708218248debfdbd981dd7c8906 [ 1202.679674] env[61594]: DEBUG oslo_concurrency.lockutils [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Lock "299afd65-10d4-4602-9a7e-b5d12e88a823" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.155s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1202.679986] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-87062919-2810-4580-b6f2-0dfbf43cb31a tempest-ListServersNegativeTestJSON-267653004 tempest-ListServersNegativeTestJSON-267653004-project-member] Expecting reply to msg 57df9d27ee914f4d8b097561ae4f3837 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1202.689731] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 57df9d27ee914f4d8b097561ae4f3837 [ 1208.845677] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b 
tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "0e943fcd-8c20-4835-9e43-f636ecf73366" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.845968] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.846431] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 6734da2c3c044f96847e7124be2c3866 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1208.857055] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6734da2c3c044f96847e7124be2c3866 [ 1208.857502] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Starting instance... {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2423}} [ 1208.859140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg b8a604554269448291e77c3258f76135 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1208.887994] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b8a604554269448291e77c3258f76135 [ 1208.903436] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1208.903682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1208.905555] env[61594]: INFO nova.compute.claims [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1208.907132] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 037448f8b4c24bcd9bad43356ed56e31 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1208.938138] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 037448f8b4c24bcd9bad43356ed56e31 [ 1208.939757] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg a60434a91e90498eadf6b66575fb054d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1208.949102] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a60434a91e90498eadf6b66575fb054d [ 1209.001507] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c55410da-342a-4a57-9c3a-b77098c60ab4 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.009566] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1677b800-699f-4869-aef9-375aec2dbdaf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.040571] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c84817-32d3-4f23-b2c9-3ceaf8a38bf7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.047543] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433689ac-88e2-45a8-b9ef-43a5b349f4e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.060783] env[61594]: DEBUG nova.compute.provider_tree [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.061811] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg e23627edbe8c48d3a6a7d83dd0a5e197 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.068621] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e23627edbe8c48d3a6a7d83dd0a5e197 [ 1209.069655] env[61594]: DEBUG nova.scheduler.client.report [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1209.071863] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 4265d1cd0625423bacc22342cb9d3f05 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.085357] env[61594]: INFO 
oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4265d1cd0625423bacc22342cb9d3f05 [ 1209.086049] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.182s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1209.086536] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Start building networks asynchronously for instance. {{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2820}} [ 1209.088116] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg a49b9f644c4b4dd18e45b321d6173a3d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.115848] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a49b9f644c4b4dd18e45b321d6173a3d [ 1209.117027] env[61594]: DEBUG nova.compute.utils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Using /dev/sd instead of None {{(pid=61594) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1209.117613] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 8d0502a048364eb18fbd768b367628f1 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.118409] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Allocating IP information in the background. {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1972}} [ 1209.118615] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] allocate_for_instance() {{(pid=61594) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1209.126134] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8d0502a048364eb18fbd768b367628f1 [ 1209.126686] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Start building block device mappings for instance. 
{{(pid=61594) _build_resources /opt/stack/nova/nova/compute/manager.py:2855}} [ 1209.128236] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 6786a8248a19439c840f1236e0aee157 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.156341] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6786a8248a19439c840f1236e0aee157 [ 1209.159090] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 53166b038b7341cdb6f63cb6f32b2832 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1209.175896] env[61594]: DEBUG nova.policy [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '13db58c4a3cb4f859e21d2c98e77afde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ba125043b4bc4657b0fd2402613ebd93', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=61594) authorize /opt/stack/nova/nova/policy.py:203}} [ 1209.198852] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 53166b038b7341cdb6f63cb6f32b2832 [ 1209.200021] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Start spawning the instance on the hypervisor. 
{{(pid=61594) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2629}} [ 1209.227658] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-15T15:25:47Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-15T15:25:32Z,direct_url=,disk_format='vmdk',id=9b91196f-102b-4380-9e69-c9f71c27118a,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='44ccca2003e24f318decb35298c62305',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-15T15:25:33Z,virtual_size=,visibility=), allow threads: False {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1209.227934] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Flavor limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1209.228121] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Image limits 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1209.228314] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Flavor pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1209.228464] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Image pref 0:0:0 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1209.228615] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=61594) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1209.228831] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1209.228993] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1209.229178] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b 
tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Got 1 possible topologies {{(pid=61594) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1209.229343] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1209.229519] env[61594]: DEBUG nova.virt.hardware [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=61594) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1209.230409] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc3e31d3-5323-425f-8015-37d884b4a83e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.238545] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce46759f-bad0-4b82-a285-d26fa86906b5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.479830] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Successfully created port: 6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1210.290894] env[61594]: DEBUG nova.compute.manager [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Received event network-vif-plugged-6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1210.291196] env[61594]: DEBUG oslo_concurrency.lockutils [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 service nova] Acquiring lock "0e943fcd-8c20-4835-9e43-f636ecf73366-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1210.291196] env[61594]: DEBUG oslo_concurrency.lockutils [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 service nova] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1210.291338] env[61594]: DEBUG oslo_concurrency.lockutils [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 service nova] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1210.291506] env[61594]: DEBUG nova.compute.manager [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 
service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] No waiting events found dispatching network-vif-plugged-6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1210.291671] env[61594]: WARNING nova.compute.manager [req-9ce1f707-ddeb-4186-88c7-828b695d07ba req-8878c471-0c91-4878-8296-1842e7c272a0 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Received unexpected event network-vif-plugged-6eafeca9-d7b1-4517-b969-e66387b65e6e for instance with vm_state building and task_state spawning. [ 1210.369600] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Successfully updated port: 6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1210.369600] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg f52443829f3f4f9e9baf8d0cdd257eeb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1210.379353] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f52443829f3f4f9e9baf8d0cdd257eeb [ 1210.380131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1210.380131] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquired lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.380294] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1210.380681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 26e824c5d224448d90a168d2061d4fe9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1210.387772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 26e824c5d224448d90a168d2061d4fe9 [ 1210.423102] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1210.582772] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Updating instance_info_cache with network_info: [{"id": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "address": "fa:16:3e:88:01:0e", "network": {"id": "8d7a6eb5-ea81-4f3b-826d-8dc8327c821c", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1158668060-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba125043b4bc4657b0fd2402613ebd93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eafeca9-d7", "ovs_interfaceid": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.583363] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 32d4606d4668438f98ecab4eda8652bf in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1210.595747] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 32d4606d4668438f98ecab4eda8652bf [ 1210.596553] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Releasing lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1210.596921] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Instance network_info: |[{"id": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "address": "fa:16:3e:88:01:0e", "network": {"id": "8d7a6eb5-ea81-4f3b-826d-8dc8327c821c", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1158668060-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba125043b4bc4657b0fd2402613ebd93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eafeca9-d7", "ovs_interfaceid": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=61594) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1987}} [ 1210.597466] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:01:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6eafeca9-d7b1-4517-b969-e66387b65e6e', 'vif_model': 'vmxnet3'}] {{(pid=61594) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1210.609442] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Creating folder: Project (ba125043b4bc4657b0fd2402613ebd93). Parent ref: group-v277030. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1210.610101] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d92380d8-7649-4372-9035-8df6500913c2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.623375] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Created folder: Project (ba125043b4bc4657b0fd2402613ebd93) in parent group-v277030. [ 1210.623618] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Creating folder: Instances. Parent ref: group-v277072. {{(pid=61594) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1210.623894] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a26e44a-a227-458a-b013-6caf27722c2c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.632828] env[61594]: INFO nova.virt.vmwareapi.vm_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Created folder: Instances in parent group-v277072. [ 1210.633076] env[61594]: DEBUG oslo.service.loopingcall [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1210.633259] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Creating VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1210.633446] env[61594]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9aaf05b-eb76-4527-bbfc-38add81f5f4a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.652923] env[61594]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1210.652923] env[61594]: value = "task-1291465" [ 1210.652923] env[61594]: _type = "Task" [ 1210.652923] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.661034] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291465, 'name': CreateVM_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.162730] env[61594]: DEBUG oslo_vmware.api [-] Task: {'id': task-1291465, 'name': CreateVM_Task, 'duration_secs': 0.318172} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.162901] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Created VM on the ESX host {{(pid=61594) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1211.163692] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1211.163868] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.164210] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1211.164456] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f911f3bf-e252-43a6-aac1-1e7193010f80 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.168772] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Waiting for the task: (returnval){ [ 1211.168772] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52243327-5865-30c2-9ea9-39f071a807b1" [ 1211.168772] env[61594]: _type = "Task" [ 1211.168772] env[61594]: } to complete. 
{{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.175702] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52243327-5865-30c2-9ea9-39f071a807b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.678827] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1211.679204] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Processing image 9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1211.679294] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.267548] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_power_states {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1212.268385] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 8eb5bfb2b81e45809bee102cadc8ffb9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1212.280107] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8eb5bfb2b81e45809bee102cadc8ffb9 [ 1212.281397] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Getting list of instances from cluster (obj){ [ 1212.281397] env[61594]: value = "domain-c8" [ 1212.281397] env[61594]: _type = "ClusterComputeResource" [ 1212.281397] env[61594]: } {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1212.282619] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6def3885-52d4-4c58-b7ba-1044cc78e0e2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.293547] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Got total of 3 instances {{(pid=61594) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1212.293707] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Triggering sync for uuid 3a9e32f2-4300-4b44-ae16-67792000eb08 {{(pid=61594) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 
1212.293893] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Triggering sync for uuid 6d514620-06f3-4bd9-8d74-3f11a064ef58 {{(pid=61594) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.294071] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Triggering sync for uuid 0e943fcd-8c20-4835-9e43-f636ecf73366 {{(pid=61594) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10329}} [ 1212.294355] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "3a9e32f2-4300-4b44-ae16-67792000eb08" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.294582] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.294786] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "0e943fcd-8c20-4835-9e43-f636ecf73366" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1212.319968] env[61594]: DEBUG nova.compute.manager [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Received event network-changed-6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11122}} [ 1212.319968] env[61594]: DEBUG nova.compute.manager [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Refreshing instance network info cache due to event network-changed-6eafeca9-d7b1-4517-b969-e66387b65e6e. 
{{(pid=61594) external_instance_event /opt/stack/nova/nova/compute/manager.py:11127}} [ 1212.319968] env[61594]: DEBUG oslo_concurrency.lockutils [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] Acquiring lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1212.319968] env[61594]: DEBUG oslo_concurrency.lockutils [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] Acquired lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1212.319968] env[61594]: DEBUG nova.network.neutron [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Refreshing network info cache for port 6eafeca9-d7b1-4517-b969-e66387b65e6e {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1212.320404] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] Expecting reply to msg bd524b53678242e88dcb6ce42960f74a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1212.327391] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bd524b53678242e88dcb6ce42960f74a [ 1212.634524] env[61594]: DEBUG nova.network.neutron [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Updated VIF entry in instance network info cache for port 6eafeca9-d7b1-4517-b969-e66387b65e6e. 
{{(pid=61594) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1212.634887] env[61594]: DEBUG nova.network.neutron [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Updating instance_info_cache with network_info: [{"id": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "address": "fa:16:3e:88:01:0e", "network": {"id": "8d7a6eb5-ea81-4f3b-826d-8dc8327c821c", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1158668060-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ba125043b4bc4657b0fd2402613ebd93", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6eafeca9-d7", "ovs_interfaceid": "6eafeca9-d7b1-4517-b969-e66387b65e6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.635427] env[61594]: INFO oslo_messaging._drivers.amqpdriver [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] Expecting reply to msg 15555b066cbf4ddbb1603e6a1770ee47 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1212.643847] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 15555b066cbf4ddbb1603e6a1770ee47 [ 1212.644419] env[61594]: DEBUG oslo_concurrency.lockutils [req-6bbe5181-23ab-49f8-917d-77b47f430bf2 req-44fac39a-f5f3-4e96-9a19-f032e32d5cd5 service nova] Releasing lock "refresh_cache-0e943fcd-8c20-4835-9e43-f636ecf73366" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1232.573090] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.539809] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.543204] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.543567] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.543616] 
env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.543732] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1236.544472] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1236.544740] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.544528] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.545009] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg f711b378c98241c59e21f5be73bd29df in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1237.556245] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg f711b378c98241c59e21f5be73bd29df [ 1237.557359] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.557580] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.557754] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1237.557916] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1237.559229] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48e1d614-1263-4e3c-9933-77e0afab58e6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.568205] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a007a168-7d34-48a2-9e62-0f89ed50d3cb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.582703] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786eb816-77eb-4340-bfd1-f72327ec6365 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.588604] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc90526-c81a-4013-9979-0a21668608cf {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.616774] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181471MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1237.616933] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1237.617163] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1237.617953] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg b157a4cfa8564eed87067ea228bf884f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1237.636775] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b157a4cfa8564eed87067ea228bf884f [ 1237.638590] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg c085ef2bf6034753958617275710afe5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1237.647326] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c085ef2bf6034753958617275710afe5 [ 1237.666161] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.666318] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.666453] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0e943fcd-8c20-4835-9e43-f636ecf73366 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1237.666630] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1237.666767] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1237.714357] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c152cdf-1f08-40aa-b21e-497b1a2153fd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.721402] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c000ba1-1da2-4079-85e0-93c5aeb4b85e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.750016] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db56587-f8be-4404-8da7-6fab749c30a5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.756528] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d457dc0-3558-42c6-bfb8-7247324a8019 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.770141] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1237.770540] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 0469122dd7c946b68a78e6542bdfcf2b in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1237.777129] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 0469122dd7c946b68a78e6542bdfcf2b [ 1237.777930] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1237.779981] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 7ce1a2467d214f94a4069dea2a9ba98a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1237.789826] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7ce1a2467d214f94a4069dea2a9ba98a [ 1237.790419] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1237.790587] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.173s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.791134] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.791514] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1239.791514] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1239.792017] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg a5a517815a69454aaa657be7d373ca26 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1239.803893] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a5a517815a69454aaa657be7d373ca26 [ 1239.805087] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1239.805246] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1239.805378] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1239.805507] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1248.715765] env[61594]: WARNING oslo_vmware.rw_handles [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.715765] env[61594]: ERROR oslo_vmware.rw_handles [ 1248.716612] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1248.718926] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1248.719283] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Copying Virtual Disk [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/8bf88f39-8e35-4899-ac6e-fb8395b8f926/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1248.719642] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5104dec0-24bb-4e7e-9be9-ca9c71824edd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.729053] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for the 
task: (returnval){ [ 1248.729053] env[61594]: value = "task-1291466" [ 1248.729053] env[61594]: _type = "Task" [ 1248.729053] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.738718] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Task: {'id': task-1291466, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.238537] env[61594]: DEBUG oslo_vmware.exceptions [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1249.238819] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1249.239376] env[61594]: ERROR nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.239376] env[61594]: Faults: ['InvalidArgument'] [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Traceback (most recent call last): [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] yield resources [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self.driver.spawn(context, instance, image_meta, [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self._fetch_image_if_missing(context, vi) [ 1249.239376] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 
3a9e32f2-4300-4b44-ae16-67792000eb08] image_cache(vi, tmp_image_ds_loc) [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] vm_util.copy_virtual_disk( [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] session._wait_for_task(vmdk_copy_task) [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] return self.wait_for_task(task_ref) [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] return evt.wait() [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] result = hub.switch() [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1249.239726] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] return self.greenlet.switch() [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self.f(*self.args, **self.kw) [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] raise exceptions.translate_fault(task_info.error) [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Faults: ['InvalidArgument'] [ 1249.240069] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] [ 1249.240069] env[61594]: INFO nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Terminating instance [ 1249.241273] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 
tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1249.241509] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.241747] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-958ce8c6-5671-4c97-bde5-b7273c48e5b3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.244022] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1249.244260] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1249.244972] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6f7946-0419-42b7-8701-bfa6a4705fc1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.251429] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1249.251632] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-418df618-06d0-45a9-8748-66fc48dd9653 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.253638] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.253811] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1249.254700] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-af0439e9-51ed-4b91-824b-9072c7c85cdd {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.259476] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for the task: (returnval){ [ 1249.259476] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52be0d3c-2abc-b252-f185-18357ec59cf0" [ 1249.259476] env[61594]: _type = "Task" [ 1249.259476] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.268960] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52be0d3c-2abc-b252-f185-18357ec59cf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.316599] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1249.316776] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1249.316933] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Deleting the datastore file [datastore1] 3a9e32f2-4300-4b44-ae16-67792000eb08 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.317205] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1877aa8c-5617-4af0-a396-4f503c2edd9d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.322567] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for the task: (returnval){ [ 1249.322567] env[61594]: value = "task-1291468" [ 1249.322567] env[61594]: _type = "Task" [ 1249.322567] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.329871] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Task: {'id': task-1291468, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.773130] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1249.773534] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Creating directory with path [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.773802] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0c20078e-d5c5-4661-8de7-459f5d48ae76 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.785283] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Created directory with path [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.785535] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Fetch image to [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1249.785771] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1249.786781] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-191ac5af-28c1-41f7-997e-2c80261ffd97 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.794041] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6721aea1-0d4b-4509-a448-94e3557cef58 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.802613] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d21daa-54d6-4f8b-a141-cead76d66f15 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.835303] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d592fdb-b40f-4838-a70b-a6e6a50e65de {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.841753] env[61594]: DEBUG oslo_vmware.api [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Task: {'id': task-1291468, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074835} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.843128] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.843326] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1249.843501] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1249.843682] env[61594]: INFO nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Took 0.60 seconds to destroy the instance on the hypervisor. 
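The CopyVirtualDisk_Task failure recorded above ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) is surfaced by oslo.vmware's task polling, which converts a vCenter task that ends in the error state into a VimFaultException. The following is a minimal sketch of that call pattern, assuming an already-created oslo_vmware.api.VMwareAPISession; the helper name copy_virtual_disk_checked and the exact argument wiring are illustrative assumptions, not the Nova code path shown in the traceback.

    from oslo_vmware import exceptions as vexc

    def copy_virtual_disk_checked(session, source_path, dest_path, dc_ref):
        # Invoke the vCenter CopyVirtualDisk_Task operation through the
        # session's VIM binding (logged above as an "Invoking ..." line).
        vim = session.vim
        task = session.invoke_api(
            vim, 'CopyVirtualDisk_Task',
            vim.service_content.virtualDiskManager,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        try:
            # wait_for_task polls task progress (the "progress is 0%" lines)
            # and raises once the task reaches the error state.
            session.wait_for_task(task)
        except vexc.VimFaultException as exc:
            # exc.fault_list carries the vCenter fault names, e.g.
            # ['InvalidArgument'] for the fileType fault seen in this log;
            # re-raising lets the compute manager abort and reschedule the
            # build, as happens later in this trace.
            raise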
[ 1249.845466] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6d3f1a61-7c72-4fef-b586-50726e877d36 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.847324] env[61594]: DEBUG nova.compute.claims [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1249.847497] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1249.847706] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1249.849686] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg e7b465a53a2a4c9a873da00892d2db7f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1249.868515] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1249.887131] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e7b465a53a2a4c9a873da00892d2db7f [ 1249.922861] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1249.981545] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2ee6e8-50a4-41cf-ad71-9c5020ed1b28 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.986308] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Completed reading data from the image iterator. 
{{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1249.986477] env[61594]: DEBUG oslo_vmware.rw_handles [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1249.990223] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa585ba-1fd3-42d6-86a4-fb1b89214df7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.021422] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d995177f-26ca-45aa-9885-0171f1f1d118 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.028152] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa51759c-f9ea-43aa-96e1-62ea093cd1cb {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.040566] env[61594]: DEBUG nova.compute.provider_tree [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.041041] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 7802ec5816fe4f668d6f55ee4dc040bb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.048376] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7802ec5816fe4f668d6f55ee4dc040bb [ 1250.049244] env[61594]: DEBUG nova.scheduler.client.report [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1250.051371] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 528b4ccac9054d8e92a0b58c7ed48014 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.061082] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 528b4ccac9054d8e92a0b58c7ed48014 [ 1250.061764] env[61594]: DEBUG oslo_concurrency.lockutils [None 
req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.214s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.062331] env[61594]: ERROR nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1250.062331] env[61594]: Faults: ['InvalidArgument'] [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Traceback (most recent call last): [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self.driver.spawn(context, instance, image_meta, [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self._fetch_image_if_missing(context, vi) [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] image_cache(vi, tmp_image_ds_loc) [ 1250.062331] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] vm_util.copy_virtual_disk( [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] session._wait_for_task(vmdk_copy_task) [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] return self.wait_for_task(task_ref) [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 
3a9e32f2-4300-4b44-ae16-67792000eb08] return evt.wait() [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] result = hub.switch() [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] return self.greenlet.switch() [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1250.062652] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] self.f(*self.args, **self.kw) [ 1250.062974] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1250.062974] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] raise exceptions.translate_fault(task_info.error) [ 1250.062974] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1250.062974] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Faults: ['InvalidArgument'] [ 1250.062974] env[61594]: ERROR nova.compute.manager [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] [ 1250.063286] env[61594]: DEBUG nova.compute.utils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1250.064377] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Build of instance 3a9e32f2-4300-4b44-ae16-67792000eb08 was re-scheduled: A specified parameter was not correct: fileType [ 1250.064377] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1250.064754] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1250.064924] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1250.065110] env[61594]: DEBUG nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1250.065281] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.382167] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg e2ead330d5014bcc91c7d656923c6f55 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.396025] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e2ead330d5014bcc91c7d656923c6f55 [ 1250.396636] env[61594]: DEBUG nova.network.neutron [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.397140] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg a216851920bf4e42ab00bfdc5d41e493 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.405737] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a216851920bf4e42ab00bfdc5d41e493 [ 1250.407987] env[61594]: INFO nova.compute.manager [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Took 0.34 seconds to deallocate network for instance. 
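For readers tracing the CopyVirtualDisk_Task failure and re-schedule above, the following is a minimal Python sketch of the polling path the traceback walks through. It is not Nova's actual helper: the session object, function name, and the CopyVirtualDisk_Task argument names here are assumptions for illustration (the argument names follow the vSphere API, and "session" is assumed to be an already-created oslo_vmware.api.VMwareAPISession).

    # Sketch only: mirrors the invoke-then-wait_for_task flow shown in the
    # traceback above. Names other than the oslo.vmware calls are illustrative.
    from oslo_vmware import exceptions as vexc

    def copy_disk_and_wait(session, vim, source_path, dest_path, dc_ref):
        # Start the server-side disk copy task on the VirtualDiskManager.
        task = session.invoke_api(
            vim, "CopyVirtualDisk_Task",
            vim.service_content.virtualDiskManager,
            sourceName=source_path, sourceDatacenter=dc_ref,
            destName=dest_path, destDatacenter=dc_ref)
        try:
            # wait_for_task() polls the task in a looping call; when the task
            # reports an error state, the fault is translated and raised,
            # as in the _poll_task frame of the traceback above.
            return session.wait_for_task(task)
        except vexc.VimFaultException as exc:
            # exc.fault_list carries the fault names, e.g. ['InvalidArgument'];
            # "A specified parameter was not correct: fileType" is the
            # localized fault message seen in the log.
            print("disk copy failed: %s" % exc)
            raise

In the run above the task reaches its error state almost immediately, so the VimFaultException with faults ['InvalidArgument'] surfaces out of wait_for_task, the instance claim is aborted, and the build is re-scheduled.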
[ 1250.408173] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 96b56ff7f1c44b4fb16c207ba875b71f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.439269] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 96b56ff7f1c44b4fb16c207ba875b71f [ 1250.441975] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 06723c538ba94c7199da4cbb1648586c in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.472451] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 06723c538ba94c7199da4cbb1648586c [ 1250.493986] env[61594]: INFO nova.scheduler.client.report [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Deleted allocations for instance 3a9e32f2-4300-4b44-ae16-67792000eb08 [ 1250.499944] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 9598f3e99ed844fbbce7e78c6800828f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.517234] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9598f3e99ed844fbbce7e78c6800828f [ 1250.517783] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3cd9b19d-caad-4c1c-a8ab-9a374674770e tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 368.215s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.518044] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 172.660s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.518309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Acquiring lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1250.518566] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.518761] env[61594]: DEBUG 
oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.520917] env[61594]: INFO nova.compute.manager [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Terminating instance [ 1250.523682] env[61594]: DEBUG nova.compute.manager [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1250.523978] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1250.524500] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a98c921a-763f-4d3d-ae05-15677087650b {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.535231] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d6826c-28d6-42b9-9042-f9fde2689fa1 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.564296] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a9e32f2-4300-4b44-ae16-67792000eb08 could not be found. [ 1250.564511] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1250.564691] env[61594]: INFO nova.compute.manager [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1250.564940] env[61594]: DEBUG oslo.service.loopingcall [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.565417] env[61594]: DEBUG nova.compute.manager [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1250.565518] env[61594]: DEBUG nova.network.neutron [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.583399] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 04aa94cac1474a5c8550d4dd6c7a5a14 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.589740] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 04aa94cac1474a5c8550d4dd6c7a5a14 [ 1250.590141] env[61594]: DEBUG nova.network.neutron [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.590513] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 51d973fcc6124590af05f12f4a8e702a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.598257] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 51d973fcc6124590af05f12f4a8e702a [ 1250.598712] env[61594]: INFO nova.compute.manager [-] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] Took 0.03 seconds to deallocate network for instance. [ 1250.602448] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg 8f2c1ef895c2426ba86fed9a9f80a41a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.633739] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 8f2c1ef895c2426ba86fed9a9f80a41a [ 1250.650030] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg d334dd636d4e46eb94cd3d9b4ad55893 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.694961] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg d334dd636d4e46eb94cd3d9b4ad55893 [ 1250.698461] env[61594]: DEBUG oslo_concurrency.lockutils [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.698821] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-a4d121a8-8a8a-421e-b733-f7896849c0dc tempest-ServerActionsTestOtherA-627368268 tempest-ServerActionsTestOtherA-627368268-project-member] Expecting reply to msg e9f344c7932d46d48f91e981dde3e2d5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1250.699899] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 38.406s 
{{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1250.700221] env[61594]: INFO nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 3a9e32f2-4300-4b44-ae16-67792000eb08] During sync_power_state the instance has a pending task (deleting). Skip. [ 1250.700422] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "3a9e32f2-4300-4b44-ae16-67792000eb08" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1250.709007] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg e9f344c7932d46d48f91e981dde3e2d5 [ 1293.546350] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.546846] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.890681] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 7efbc4ef14ba4951bd5ea825c300e2d2 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1294.900566] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 7efbc4ef14ba4951bd5ea825c300e2d2 [ 1295.543648] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.943340] env[61594]: WARNING oslo_vmware.rw_handles [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1295.943340] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1295.943340] env[61594]: 
ERROR oslo_vmware.rw_handles [ 1295.943989] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1295.946113] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1295.946400] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Copying Virtual Disk [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/85256129-2934-4b6a-9b5a-a89fbca5e392/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1295.946727] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c05b87f4-51de-4d35-9015-315af376ffc6 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.955496] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for the task: (returnval){ [ 1295.955496] env[61594]: value = "task-1291469" [ 1295.955496] env[61594]: _type = "Task" [ 1295.955496] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.963434] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Task: {'id': task-1291469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.466458] env[61594]: DEBUG oslo_vmware.exceptions [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Fault InvalidArgument not matched. 
{{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1296.466756] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.467375] env[61594]: ERROR nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.467375] env[61594]: Faults: ['InvalidArgument'] [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Traceback (most recent call last): [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] yield resources [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self.driver.spawn(context, instance, image_meta, [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self._fetch_image_if_missing(context, vi) [ 1296.467375] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] image_cache(vi, tmp_image_ds_loc) [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] vm_util.copy_virtual_disk( [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] session._wait_for_task(vmdk_copy_task) [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return self.wait_for_task(task_ref) [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return evt.wait() [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] result = hub.switch() [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1296.467745] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return self.greenlet.switch() [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self.f(*self.args, **self.kw) [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] raise exceptions.translate_fault(task_info.error) [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Faults: ['InvalidArgument'] [ 1296.468139] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] [ 1296.468139] env[61594]: INFO nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Terminating instance [ 1296.469281] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.469489] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.469721] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fad738c8-9d63-4b51-8c32-a15986e2e10e {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.471738] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1296.471892] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.472085] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1296.472503] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg b3a17ba931ae4b36b374cdcdfed72bad in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1296.478861] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1296.479089] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=61594) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1296.479748] env[61594]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3417b74d-652a-4212-876c-3da5ce2feec5 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.482198] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b3a17ba931ae4b36b374cdcdfed72bad [ 1296.488077] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Waiting for the task: (returnval){ [ 1296.488077] env[61594]: value = "session[52335440-db12-0a44-d4d4-fbc064413e4b]52cb0e99-e96f-7c83-c34a-52695724e955" [ 1296.488077] env[61594]: _type = "Task" [ 1296.488077] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.495261] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Task: {'id': session[52335440-db12-0a44-d4d4-fbc064413e4b]52cb0e99-e96f-7c83-c34a-52695724e955, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.543550] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.543784] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1296.543938] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}} [ 1296.758970] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1296.822392] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.822943] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 597a1c2ef77942b8a5c2a3fc90e7e183 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1296.830969] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 597a1c2ef77942b8a5c2a3fc90e7e183 [ 1296.831536] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Releasing lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1296.831940] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Start destroying the instance on the hypervisor. 
{{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1296.832191] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1296.833241] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-773c6f8d-3bb2-45fb-95a2-8939de6e8e50 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.841245] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1296.841457] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-615aefc3-f630-49e8-8a1f-d1301aaca31c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.872745] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1296.872954] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1296.873150] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Deleting the datastore file [datastore1] 6d514620-06f3-4bd9-8d74-3f11a064ef58 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.873390] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f670349e-b17f-416e-afc9-19c62e3fc6af {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.879995] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for the task: (returnval){ [ 1296.879995] env[61594]: value = "task-1291471" [ 1296.879995] env[61594]: _type = "Task" [ 1296.879995] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.887590] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Task: {'id': task-1291471, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.998412] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Preparing fetch location {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1296.998797] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Creating directory with path [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1296.998846] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e0bb520-0e42-42e8-8938-62f86347e2aa {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.008986] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Created directory with path [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a {{(pid=61594) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1297.009200] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Fetch image to [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1297.009373] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1297.010083] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e5be1e-80d5-48a8-81de-ae231c8f77de {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.016386] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f56bdf-ca5a-4bab-8e13-6b7e017e893e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.024905] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9595c78f-5f23-41fd-891c-851bad3356de {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.055825] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a226445-b9d9-4b3b-957b-101d490dfc9a {{(pid=61594) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.060851] env[61594]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-292c82cf-d6ac-448f-a8d7-85f9539d2e8a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.080514] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Downloading image file data 9b91196f-102b-4380-9e69-c9f71c27118a to the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1297.129454] env[61594]: DEBUG oslo_vmware.rw_handles [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1297.193908] env[61594]: DEBUG oslo_vmware.rw_handles [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Completed reading data from the image iterator. {{(pid=61594) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1297.194102] env[61594]: DEBUG oslo_vmware.rw_handles [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=61594) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1297.390360] env[61594]: DEBUG oslo_vmware.api [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Task: {'id': task-1291471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041061} completed successfully. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.390360] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1297.390360] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1297.390636] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1297.390694] env[61594]: INFO nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1297.390934] env[61594]: DEBUG oslo.service.loopingcall [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1297.391155] env[61594]: DEBUG nova.compute.manager [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1297.393304] env[61594]: DEBUG nova.compute.claims [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1297.393474] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.393682] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.395507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg cb0468bf786d49cf822ff763b5fa0bea in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.429375] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cb0468bf786d49cf822ff763b5fa0bea [ 1297.471631] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fa13a6-f3d4-43cf-84bc-7078809e1639 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.479300] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e104438e-c452-4b4d-a83c-fae2c5530530 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.510225] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b387d22-f91b-4c4f-bca6-e48fc40ee0d7 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.516903] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21fecc9-6df3-40da-8057-d8db9d7dc12d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.529287] env[61594]: DEBUG nova.compute.provider_tree [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.529791] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 2b5149937ed54044935367e5c54c6b46 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.537393] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 
2b5149937ed54044935367e5c54c6b46 [ 1297.538047] env[61594]: DEBUG nova.scheduler.client.report [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1297.540544] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 2db1509061f147638da3064f841a0f85 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.543975] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.554282] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2db1509061f147638da3064f841a0f85 [ 1297.554999] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.161s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.555547] env[61594]: ERROR nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.555547] env[61594]: Faults: ['InvalidArgument'] [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Traceback (most recent call last): [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self.driver.spawn(context, instance, image_meta, [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self._fetch_image_if_missing(context, vi) [ 1297.555547] env[61594]: ERROR 
nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] image_cache(vi, tmp_image_ds_loc) [ 1297.555547] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] vm_util.copy_virtual_disk( [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] session._wait_for_task(vmdk_copy_task) [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return self.wait_for_task(task_ref) [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return evt.wait() [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] result = hub.switch() [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] return self.greenlet.switch() [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1297.556153] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] self.f(*self.args, **self.kw) [ 1297.556765] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1297.556765] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] raise exceptions.translate_fault(task_info.error) [ 1297.556765] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1297.556765] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Faults: ['InvalidArgument'] [ 1297.556765] env[61594]: ERROR nova.compute.manager [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] [ 1297.556765] env[61594]: DEBUG nova.compute.utils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 
tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1297.557847] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Build of instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 was re-scheduled: A specified parameter was not correct: fileType [ 1297.557847] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}} [ 1297.558293] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}} [ 1297.558529] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.558684] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.558856] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1297.559281] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 6e55c9e8da1e465f9e300b32f22e3aef in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.566932] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6e55c9e8da1e465f9e300b32f22e3aef [ 1297.585652] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1297.645820] env[61594]: DEBUG nova.network.neutron [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.646367] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 24c792c04cfd4ec0a1a03917889e6ad9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.654638] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 24c792c04cfd4ec0a1a03917889e6ad9 [ 1297.655199] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Releasing lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.655416] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}} [ 1297.655602] env[61594]: DEBUG nova.compute.manager [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Skipping network deallocation for instance since networking was not requested. 
{{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2276}} [ 1297.657259] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 20326f74333e45b1a840365ee636d8c5 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.692772] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 20326f74333e45b1a840365ee636d8c5 [ 1297.695258] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 4f76f8008dc14011afeba8e08d1c82c3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.722942] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 4f76f8008dc14011afeba8e08d1c82c3 [ 1297.744199] env[61594]: INFO nova.scheduler.client.report [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Deleted allocations for instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 [ 1297.749723] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg cde8ff19dfa0471394d67f68e3c77a14 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.763065] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg cde8ff19dfa0471394d67f68e3c77a14 [ 1297.763489] env[61594]: DEBUG oslo_concurrency.lockutils [None req-9d799411-0e05-48ed-a534-eaf776166255 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 339.687s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.763790] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 143.762s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.764088] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "6d514620-06f3-4bd9-8d74-3f11a064ef58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.764309] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.764479] env[61594]: DEBUG 
oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1297.766426] env[61594]: INFO nova.compute.manager [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Terminating instance [ 1297.768089] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquiring lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1297.768253] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Acquired lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.768422] env[61594]: DEBUG nova.network.neutron [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Building network info cache for instance {{(pid=61594) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1297.768817] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg c32a61d9a54946668a288dbcf2a0db41 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.776983] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c32a61d9a54946668a288dbcf2a0db41 [ 1297.798994] env[61594]: DEBUG nova.network.neutron [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance cache missing network info. 
{{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1297.899320] env[61594]: DEBUG nova.network.neutron [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.899834] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg c61c5d8972df4953b4d1a2673eab107e in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.908036] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg c61c5d8972df4953b4d1a2673eab107e [ 1297.908592] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Releasing lock "refresh_cache-6d514620-06f3-4bd9-8d74-3f11a064ef58" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1297.908971] env[61594]: DEBUG nova.compute.manager [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1297.909476] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1297.909679] env[61594]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0a0d343e-70cd-496e-ba9c-48a952a77718 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.918623] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e15d61-5019-4f34-8e18-d434672d880e {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.939564] env[61594]: WARNING nova.virt.vmwareapi.vmops [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6d514620-06f3-4bd9-8d74-3f11a064ef58 could not be found. [ 1297.939758] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1297.939941] env[61594]: INFO nova.compute.manager [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 1297.940196] env[61594]: DEBUG oslo.service.loopingcall [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=61594) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1297.940388] env[61594]: DEBUG nova.compute.manager [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}} [ 1297.940478] env[61594]: DEBUG nova.network.neutron [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1297.955621] env[61594]: DEBUG nova.network.neutron [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Instance cache missing network info. {{(pid=61594) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1297.956075] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 2e7a6436a0e2469d8384dc4a051b0401 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.962608] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2e7a6436a0e2469d8384dc4a051b0401 [ 1297.962927] env[61594]: DEBUG nova.network.neutron [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.963305] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Expecting reply to msg 1e35c9b72fc746e98e0f65858171be61 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1297.970344] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 1e35c9b72fc746e98e0f65858171be61 [ 1297.970752] env[61594]: INFO nova.compute.manager [-] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] Took 0.03 seconds to deallocate network for instance. 
[ 1297.974007] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 481cdf72fb4343ff8c2a4a709b03ec70 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.001572] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 481cdf72fb4343ff8c2a4a709b03ec70 [ 1298.015521] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 5c0119d2839d4471a5dcb54e3d479860 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.052714] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5c0119d2839d4471a5dcb54e3d479860 [ 1298.055535] env[61594]: DEBUG oslo_concurrency.lockutils [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.292s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.055872] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-3f9cc7cc-c3a5-473c-a172-b13f8b6b9a41 tempest-ServerShowV257Test-578580744 tempest-ServerShowV257Test-578580744-project-member] Expecting reply to msg 42644ca81f734f0087c67289c0139716 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.056696] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 85.762s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.056939] env[61594]: INFO nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 6d514620-06f3-4bd9-8d74-3f11a064ef58] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1298.057164] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "6d514620-06f3-4bd9-8d74-3f11a064ef58" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.066531] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 42644ca81f734f0087c67289c0139716 [ 1298.543508] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.543773] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1298.544165] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 29cd458853d04df3b816bba13da71973 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.553466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 29cd458853d04df3b816bba13da71973 [ 1298.554466] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.554683] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.554847] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.555017] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1298.557512] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca56eaf1-08b2-4087-9468-4afe5ce88c94 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.565740] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc801ca-4e86-45a9-bb01-9e0fa3a7d164 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.579515] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6c45beff-cf10-4839-9ce2-46669b5a27f0 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.585833] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66f7205-0fbc-4c88-96da-2f692c81ff40 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.614275] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181529MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1298.614449] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.614653] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.615508] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 76d0c7bcd11947f181b42547009155fb in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.629507] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 76d0c7bcd11947f181b42547009155fb [ 1298.630735] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 9db74cf887dc437898e5b51a8f1b55fe in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.644029] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 9db74cf887dc437898e5b51a8f1b55fe [ 1298.661493] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Instance 0e943fcd-8c20-4835-9e43-f636ecf73366 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=61594) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1298.661700] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1298.661843] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=640MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1298.690046] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5d669c-c7da-4010-85a8-c83eff6a90ed {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.697319] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c5853db-9808-4092-a90c-266a095a5d40 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.728898] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078e8f2a-304c-4e20-9822-c67263081dc2 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.736078] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5bd6ec0-c108-4ead-af17-68c2871a0543 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.748919] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.749392] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 992a3fe7726647c9824640cac215f35d in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.757985] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 992a3fe7726647c9824640cac215f35d [ 1298.758837] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1298.760976] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg a8f470a6827e409c88a8f59da821eb13 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1298.771237] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg a8f470a6827e409c88a8f59da821eb13 [ 
1298.771816] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1298.772016] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.157s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.768407] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.769106] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 41dd98b9323c4bb6b27f3e30e96f0bf9 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1300.778666] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 41dd98b9323c4bb6b27f3e30e96f0bf9 [ 1301.543706] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1301.543901] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}} [ 1301.544021] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}} [ 1301.544591] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 14d270b1805849eeb91c1d396bd39fb0 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1301.553985] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 14d270b1805849eeb91c1d396bd39fb0 [ 1301.554849] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Skipping network cache update for instance because it is Building. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1301.555042] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. 
{{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}} [ 1345.961742] env[61594]: WARNING oslo_vmware.rw_handles [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles response.begin() [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1345.961742] env[61594]: ERROR oslo_vmware.rw_handles [ 1345.962484] env[61594]: DEBUG nova.virt.vmwareapi.images [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Downloaded image file data 9b91196f-102b-4380-9e69-c9f71c27118a to vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk on the data store datastore1 {{(pid=61594) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1345.964120] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Caching image {{(pid=61594) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1345.964372] env[61594]: DEBUG nova.virt.vmwareapi.vm_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Copying Virtual Disk [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/tmp-sparse.vmdk to [datastore1] vmware_temp/3783abef-1527-4bc2-9b45-b591603be8ca/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk {{(pid=61594) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1345.964689] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4bb122f0-80ec-4778-9f57-2053823cec8d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.972203] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Waiting for the task: (returnval){ [ 1345.972203] 
env[61594]: value = "task-1291472" [ 1345.972203] env[61594]: _type = "Task" [ 1345.972203] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.979787] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Task: {'id': task-1291472, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.483398] env[61594]: DEBUG oslo_vmware.exceptions [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Fault InvalidArgument not matched. {{(pid=61594) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1346.483683] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9b91196f-102b-4380-9e69-c9f71c27118a/9b91196f-102b-4380-9e69-c9f71c27118a.vmdk" {{(pid=61594) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.484261] env[61594]: ERROR nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.484261] env[61594]: Faults: ['InvalidArgument'] [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Traceback (most recent call last): [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/compute/manager.py", line 2885, in _build_resources [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] yield resources [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self.driver.spawn(context, instance, image_meta, [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self._fetch_image_if_missing(context, vi) [ 1346.484261] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] image_cache(vi, tmp_image_ds_loc) [ 
1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] vm_util.copy_virtual_disk( [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] session._wait_for_task(vmdk_copy_task) [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return self.wait_for_task(task_ref) [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return evt.wait() [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] result = hub.switch() [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1346.484623] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return self.greenlet.switch() [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self.f(*self.args, **self.kw) [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] raise exceptions.translate_fault(task_info.error) [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Faults: ['InvalidArgument'] [ 1346.484963] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] [ 1346.484963] env[61594]: INFO nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Terminating instance [ 1346.487674] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] 
[instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Start destroying the instance on the hypervisor. {{(pid=61594) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3141}} [ 1346.487870] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Destroying instance {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1346.488659] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ffd0ce-ff6c-441b-9e0d-17b27c90870d {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.495186] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Unregistering the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1346.495402] env[61594]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1aed3067-018f-4d13-a140-4330933537dc {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.561034] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Unregistered the VM {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1346.561264] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Deleting contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1346.561450] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Deleting the datastore file [datastore1] 0e943fcd-8c20-4835-9e43-f636ecf73366 {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.561712] env[61594]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ae7ecb64-3f58-4add-b939-3be0d848840a {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.567226] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Waiting for the task: (returnval){ [ 1346.567226] env[61594]: value = "task-1291474" [ 1346.567226] env[61594]: _type = "Task" [ 1346.567226] env[61594]: } to complete. {{(pid=61594) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.575608] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Task: {'id': task-1291474, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.077253] env[61594]: DEBUG oslo_vmware.api [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Task: {'id': task-1291474, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063457} completed successfully. {{(pid=61594) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.077624] env[61594]: DEBUG nova.virt.vmwareapi.ds_util [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Deleted the datastore file {{(pid=61594) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.077670] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Deleted contents of the VM from datastore datastore1 {{(pid=61594) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1347.077829] env[61594]: DEBUG nova.virt.vmwareapi.vmops [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Instance destroyed {{(pid=61594) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1347.078011] env[61594]: INFO nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Took 0.59 seconds to destroy the instance on the hypervisor. 
[ 1347.080188] env[61594]: DEBUG nova.compute.claims [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Aborting claim: {{(pid=61594) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1347.080365] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1347.080576] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1347.082437] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg b1db55c028f541f087a0159cd91250f3 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1347.118588] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b1db55c028f541f087a0159cd91250f3 [ 1347.146560] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded3aefe-35a3-4a6d-9b20-ffc1dcb73610 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.153217] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce85e359-3103-4720-88f8-e11da96453b3 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.182227] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c11131-b662-45be-b37b-efee282002ac {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.188804] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab146f0c-06b3-49c7-9a37-2c22a945d1ee {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.201296] env[61594]: DEBUG nova.compute.provider_tree [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1347.201777] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 518b554a2dea44bdae3af78197435790 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455 [ 1347.208911] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 518b554a2dea44bdae3af78197435790 [ 1347.209753] env[61594]: DEBUG 
nova.scheduler.client.report [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1347.211853] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 5b7a4e6e44b9435fb92b087bf464dd0f in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.222247] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5b7a4e6e44b9435fb92b087bf464dd0f
[ 1347.222879] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.142s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.223397] env[61594]: ERROR nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1347.223397] env[61594]: Faults: ['InvalidArgument']
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Traceback (most recent call last):
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/compute/manager.py", line 2632, in _build_and_run_instance
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self.driver.spawn(context, instance, image_meta,
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self._vmops.spawn(context, instance, image_meta, injected_files,
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self._fetch_image_if_missing(context, vi)
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] image_cache(vi, tmp_image_ds_loc)
[ 1347.223397] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] vm_util.copy_virtual_disk(
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] session._wait_for_task(vmdk_copy_task)
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return self.wait_for_task(task_ref)
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return evt.wait()
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] result = hub.switch()
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] return self.greenlet.switch()
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner
[ 1347.223713] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] self.f(*self.args, **self.kw)
[ 1347.224029] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task
[ 1347.224029] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] raise exceptions.translate_fault(task_info.error)
[ 1347.224029] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType
[ 1347.224029] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Faults: ['InvalidArgument']
[ 1347.224029] env[61594]: ERROR nova.compute.manager [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366]
[ 1347.224157] env[61594]: DEBUG nova.compute.utils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] VimFaultException {{(pid=61594) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}}
[ 1347.225512] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Build of instance 0e943fcd-8c20-4835-9e43-f636ecf73366 was re-scheduled: A specified parameter was not correct: fileType
[ 1347.225512] env[61594]: Faults: ['InvalidArgument'] {{(pid=61594) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2471}}
[ 1347.225902] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Unplugging VIFs for instance {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2997}}
[ 1347.226084] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=61594) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3020}}
[ 1347.226261] env[61594]: DEBUG nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Deallocating network for instance {{(pid=61594) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2280}}
[ 1347.226426] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] deallocate_for_instance() {{(pid=61594) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}}
[ 1347.474906] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg bfe6e9da917e4191a067c8fa6a6a7854 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.484015] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg bfe6e9da917e4191a067c8fa6a6a7854
[ 1347.484580] env[61594]: DEBUG nova.network.neutron [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Updating instance_info_cache with network_info: [] {{(pid=61594) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}}
[ 1347.485158] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 452bcb238b514cd3b64010d75eb84635 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.496336] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 452bcb238b514cd3b64010d75eb84635
[ 1347.496336] env[61594]: INFO nova.compute.manager [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] Took 0.27 seconds to deallocate network for instance.
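The traceback above shows where the build died: while priming the image cache, the virtual-disk copy task was rejected by vCenter with an InvalidArgument fault on fileType, the resource claim was aborted, and the instance was re-scheduled. As a purely illustrative aid (not part of this CI job), a small stdlib-only scan over a log in this format can list which instances hit such build failures; the log file name and helper name below are assumptions, not anything referenced in the log.

import re
from collections import defaultdict

# Matches records of the form seen above:
# [ 1347.223397] env[61594]: ERROR nova.compute.manager [...]
#     [instance: <uuid>] Failed to build and run instance: <exception text>
FAILED_BUILD = re.compile(
    r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"Failed to build and run instance: (?P<error>.+)$"
)

def failed_builds(path="nova-compute.log"):  # hypothetical file name
    """Return {instance uuid: [error strings]} for failed builds in the log."""
    failures = defaultdict(list)
    with open(path, encoding="utf-8", errors="replace") as fh:
        for line in fh:
            match = FAILED_BUILD.search(line)
            if match:
                failures[match.group("uuid")].append(match.group("error").strip())
    return dict(failures)

if __name__ == "__main__":
    for uuid, errors in failed_builds().items():
        print(uuid, "->", errors[0])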
[ 1347.497907] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg 2bb3a7a6a1df4995968cc5924e05b978 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.532302] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 2bb3a7a6a1df4995968cc5924e05b978
[ 1347.535023] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg de425771b4694019819d59e0a8064ba4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.566203] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg de425771b4694019819d59e0a8064ba4
[ 1347.590134] env[61594]: INFO nova.scheduler.client.report [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Deleted allocations for instance 0e943fcd-8c20-4835-9e43-f636ecf73366
[ 1347.596157] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Expecting reply to msg ac156463105f43f59f990fa3911d2211 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1347.609480] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg ac156463105f43f59f990fa3911d2211
[ 1347.610141] env[61594]: DEBUG oslo_concurrency.lockutils [None req-0663d1cb-e614-410e-9db8-ea442706471b tempest-ServerTagsTestJSON-1574734888 tempest-ServerTagsTestJSON-1574734888-project-member] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 138.764s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1347.610381] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 135.316s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1347.610569] env[61594]: INFO nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] [instance: 0e943fcd-8c20-4835-9e43-f636ecf73366] During sync_power_state the instance has a pending task (spawning). Skip.
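The oslo_concurrency.lockutils records above show per-lock wait and hold timings on the per-instance lock: the build path held the lock for 138.764s, so the power-state sync that needed the same lock reports waiting 135.316s before it could run. As a minimal illustration of that pattern (the function names below are hypothetical stand-ins, not Nova code), two callers serialized on the same named lock via oslo.concurrency look like this:

from oslo_concurrency import lockutils

# Illustrative only: two callers share the per-instance named lock, so a
# long critical section in one shows up as a long "waited" time in the other.
@lockutils.synchronized("0e943fcd-8c20-4835-9e43-f636ecf73366")
def build_instance():
    # long-running work holds the lock (in the log above: ~138.8s)
    pass

@lockutils.synchronized("0e943fcd-8c20-4835-9e43-f636ecf73366")
def sync_power_state():
    # blocks until build_instance releases the lock (~135.3s waited above)
    pass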
[ 1347.610746] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "0e943fcd-8c20-4835-9e43-f636ecf73366" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1353.552195] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1355.544612] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1355.544995] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1358.544425] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1358.544851] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1358.544851] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1358.544964] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=61594) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10538}}
[ 1359.545299] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1360.544874] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager.update_available_resource {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1360.545306] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 876f39171a3e4a298b9e431134aaabc4 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1360.556066] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 876f39171a3e4a298b9e431134aaabc4
[ 1360.556986] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1360.557195] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1360.557369] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1360.557526] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=61594) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}}
[ 1360.558657] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3f32b6-007d-4079-a7b3-54963ea630b8 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.567241] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ecd5d0-5f1d-4f1c-bd86-ad81f54df530 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.580601] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d6844b-4534-4753-8cb6-ef9c57f14691 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.586481] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495cae4a-04c2-4ed3-9f79-62327361944f {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.614803] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181532MB free_disk=139GB free_vcpus=48 pci_devices=None {{(pid=61594) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}}
[ 1360.614944] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 1360.615155] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 1360.615958] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 6f04ea41803247519a315bb69f0dcc3a in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1360.625688] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 6f04ea41803247519a315bb69f0dcc3a
[ 1360.626347] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg b623ecce0ce841ca9fde9dd3c41349c6 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1360.635568] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg b623ecce0ce841ca9fde9dd3c41349c6
[ 1360.649906] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}}
[ 1360.650092] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=61594) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}}
[ 1360.662977] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a35873b-b1d5-41fe-8889-d04e3ffc1c51 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.669991] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e558bb1-93d3-4708-b8aa-32eb07a296df {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.700321] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3059a5-b766-4e26-bc5e-b365b441fc8c {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.707519] env[61594]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c4283b-fdda-4c4e-87b3-2af76a04f047 {{(pid=61594) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 1360.720096] env[61594]: DEBUG nova.compute.provider_tree [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed in ProviderTree for provider: f0ff3a26-85e8-47dd-b241-86a582e8d4be {{(pid=61594) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 1360.720532] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 89ab6655cc9d42ec93d81d016d79c147 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1360.728611] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 89ab6655cc9d42ec93d81d016d79c147
[ 1360.729495] env[61594]: DEBUG nova.scheduler.client.report [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Inventory has not changed for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 139, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=61594) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}}
[ 1360.731571] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 5f7dea123bdf4bb49e040f6409891737 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1360.741972] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 5f7dea123bdf4bb49e040f6409891737
[ 1360.742616] env[61594]: DEBUG nova.compute.resource_tracker [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=61594) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}}
[ 1360.743062] env[61594]: DEBUG oslo_concurrency.lockutils [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.128s {{(pid=61594) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 1363.744028] env[61594]: DEBUG oslo_service.periodic_task [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=61594) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 1363.744028] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Starting heal instance info cache {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9919}}
[ 1363.744028] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Rebuilding the list of instances to heal {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9923}}
[ 1363.744466] env[61594]: INFO oslo_messaging._drivers.amqpdriver [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Expecting reply to msg 97e74accf0274a769bd1ca238239c071 in queue reply_c2db91d1f18a4ea8b8ecaf10b8005455
[ 1363.753349] env[61594]: INFO oslo_messaging._drivers.amqpdriver [-] Received RPC response for msg 97e74accf0274a769bd1ca238239c071
[ 1363.753700] env[61594]: DEBUG nova.compute.manager [None req-6c6b7998-5df8-4f3b-94d7-97cde67d1691 None None] Didn't find any instances for network info cache update. {{(pid=61594) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10005}}
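The resource-tracker audit above reports the raw hypervisor view (48 vCPUs, 196590 MB RAM, 200 GB disk) and then confirms that the placement inventory for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be is unchanged. As a worked illustration only (the helper below is not Nova code), placement-style effective capacity per resource class is (total - reserved) * allocation_ratio, which for the logged inventory gives 192 VCPU, 196078 MEMORY_MB and 400 DISK_GB:

# Illustrative helper: effective capacity from the totals, reserved values and
# allocation ratios logged for provider f0ff3a26-85e8-47dd-b241-86a582e8d4be
# (min_unit/max_unit/step_size omitted for brevity).
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def effective_capacity(inventory):
    """Placement-style capacity: (total - reserved) * allocation_ratio."""
    return {
        rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        for rc, inv in inventory.items()
    }

print(effective_capacity(INVENTORY))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}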